re PR debug/86523 (ICE in gen_member_die, at dwarf2out.c:24933 starting from r262560)
1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105
106 #ifndef XCOFF_DEBUGGING_INFO
107 #define XCOFF_DEBUGGING_INFO 0
108 #endif
109
110 #ifndef HAVE_XCOFF_DWARF_EXTRAS
111 #define HAVE_XCOFF_DWARF_EXTRAS 0
112 #endif
113
114 #ifdef VMS_DEBUGGING_INFO
115 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
116
117 /* Define this macro to be a nonzero value if the directory specifications
118 which are output in the debug info should end with a separator. */
119 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
120 /* Define this macro to evaluate to a nonzero value if GCC should refrain
121 from generating indirect strings in DWARF2 debug information, for instance
122 if your target is stuck with an old version of GDB that is unable to
123 process them properly or uses VMS Debug. */
124 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
125 #else
126 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
127 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
128 #endif
129
130 /* ??? Poison these here until it can be done generically. They've been
131 totally replaced in this file; make sure it stays that way. */
132 #undef DWARF2_UNWIND_INFO
133 #undef DWARF2_FRAME_INFO
134 #if (GCC_VERSION >= 3000)
135 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
136 #endif
137
138 /* The size of the target's pointer type. */
139 #ifndef PTR_SIZE
140 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
141 #endif
142
143 /* Array of RTXes referenced by the debugging information, which therefore
144 must be kept around forever. */
145 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
146
147 /* A pointer to the base of a list of incomplete types which might be
148 completed at some later time. incomplete_types needs to be a
149 vec<tree, va_gc> * because we want to tell the garbage collector about
150 it. */
151 static GTY(()) vec<tree, va_gc> *incomplete_types;
152
153 /* Pointers to various DWARF2 sections. */
154 static GTY(()) section *debug_info_section;
155 static GTY(()) section *debug_skeleton_info_section;
156 static GTY(()) section *debug_abbrev_section;
157 static GTY(()) section *debug_skeleton_abbrev_section;
158 static GTY(()) section *debug_aranges_section;
159 static GTY(()) section *debug_addr_section;
160 static GTY(()) section *debug_macinfo_section;
161 static const char *debug_macinfo_section_name;
162 static unsigned macinfo_label_base = 1;
163 static GTY(()) section *debug_line_section;
164 static GTY(()) section *debug_skeleton_line_section;
165 static GTY(()) section *debug_loc_section;
166 static GTY(()) section *debug_pubnames_section;
167 static GTY(()) section *debug_pubtypes_section;
168 static GTY(()) section *debug_str_section;
169 static GTY(()) section *debug_line_str_section;
170 static GTY(()) section *debug_str_dwo_section;
171 static GTY(()) section *debug_str_offsets_section;
172 static GTY(()) section *debug_ranges_section;
173 static GTY(()) section *debug_frame_section;
174
175 /* Maximum size (in bytes) of an artificially generated label. */
176 #define MAX_ARTIFICIAL_LABEL_BYTES 40
177
178 /* According to the (draft) DWARF 3 specification, the initial length
179 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
180 bytes are 0xffffffff, followed by the length stored in the next 8
181 bytes.
182
183 However, the SGI/MIPS ABI uses an initial length which is equal to
184 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
185
186 #ifndef DWARF_INITIAL_LENGTH_SIZE
187 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
188 #endif
189
190 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
191 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
192 #endif
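/* A minimal illustration of the initial-length layouts described above,
   for the two possible DWARF_OFFSET_SIZE values:

     32-bit DWARF (DWARF_OFFSET_SIZE == 4):  [ length:4 ]
     64-bit DWARF (DWARF_OFFSET_SIZE == 8):  [ 0xffffffff:4 ][ length:8 ]

   which is why DWARF_INITIAL_LENGTH_SIZE is 4 or 12 bytes respectively.  */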
193
194 /* Round SIZE up to the nearest BOUNDARY. */
195 #define DWARF_ROUND(SIZE,BOUNDARY) \
196 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
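/* A worked example of the rounding arithmetic, assuming SIZE == 10 and
   BOUNDARY == 4:

     DWARF_ROUND (10, 4) == (((10 + 4 - 1) / 4) * 4) == (13 / 4) * 4 == 12

   i.e. 10 rounded up to the next multiple of 4.  */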
197
198 /* CIE identifier. */
199 #if HOST_BITS_PER_WIDE_INT >= 64
200 #define DWARF_CIE_ID \
201 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
202 #else
203 #define DWARF_CIE_ID DW_CIE_ID
204 #endif
205
206
207 /* A vector for a table that contains frame description
208 information for each routine. */
209 #define NOT_INDEXED (-1U)
210 #define NO_INDEX_ASSIGNED (-2U)
211
212 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
213
214 struct GTY((for_user)) indirect_string_node {
215 const char *str;
216 unsigned int refcount;
217 enum dwarf_form form;
218 char *label;
219 unsigned int index;
220 };
221
222 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
223 {
224 typedef const char *compare_type;
225
226 static hashval_t hash (indirect_string_node *);
227 static bool equal (indirect_string_node *, const char *);
228 };
229
230 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
231
232 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
233
234 /* With split_debug_info, both the comp_dir and dwo_name go in the
235 main object file, rather than the dwo, similar to the force_direct
236 parameter elsewhere but with additional complications:
237
238 1) The string is needed in both the main object file and the dwo.
239 That is, the comp_dir and dwo_name will appear in both places.
240
241 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
242 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
243
244 3) GCC chooses the form to use late, depending on the size and
245 reference count.
246
247 Rather than forcing all the debug string handling functions and
248 callers to deal with these complications, simply use a separate,
249 special-cased string table for any attribute that should go in the
250 main object file. This limits the complexity to just the places
251 that need it. */
252
253 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
254
255 static GTY(()) int dw2_string_counter;
256
257 /* True if the compilation unit places functions in more than one section. */
258 static GTY(()) bool have_multiple_function_sections = false;
259
260 /* Whether the default text and cold text sections have been used at all. */
261 static GTY(()) bool text_section_used = false;
262 static GTY(()) bool cold_text_section_used = false;
263
264 /* The default cold text section. */
265 static GTY(()) section *cold_text_section;
266
267 /* The DIE for C++14 'auto' in a function return type. */
268 static GTY(()) dw_die_ref auto_die;
269
270 /* The DIE for C++14 'decltype(auto)' in a function return type. */
271 static GTY(()) dw_die_ref decltype_auto_die;
272
273 /* Forward declarations for functions defined in this file. */
274
275 static void output_call_frame_info (int);
276 static void dwarf2out_note_section_used (void);
277
278 /* Personality decl of current unit. Used only when assembler does not support
279 personality CFI. */
280 static GTY(()) rtx current_unit_personality;
281
282 /* Whether an eh_frame section is required. */
283 static GTY(()) bool do_eh_frame = false;
284
285 /* .debug_rnglists next index. */
286 static unsigned int rnglist_idx;
287
288 /* Data and reference forms for relocatable data. */
289 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
290 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
291
292 #ifndef DEBUG_FRAME_SECTION
293 #define DEBUG_FRAME_SECTION ".debug_frame"
294 #endif
295
296 #ifndef FUNC_BEGIN_LABEL
297 #define FUNC_BEGIN_LABEL "LFB"
298 #endif
299
300 #ifndef FUNC_END_LABEL
301 #define FUNC_END_LABEL "LFE"
302 #endif
303
304 #ifndef PROLOGUE_END_LABEL
305 #define PROLOGUE_END_LABEL "LPE"
306 #endif
307
308 #ifndef EPILOGUE_BEGIN_LABEL
309 #define EPILOGUE_BEGIN_LABEL "LEB"
310 #endif
311
312 #ifndef FRAME_BEGIN_LABEL
313 #define FRAME_BEGIN_LABEL "Lframe"
314 #endif
315 #define CIE_AFTER_SIZE_LABEL "LSCIE"
316 #define CIE_END_LABEL "LECIE"
317 #define FDE_LABEL "LSFDE"
318 #define FDE_AFTER_SIZE_LABEL "LASFDE"
319 #define FDE_END_LABEL "LEFDE"
320 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
321 #define LINE_NUMBER_END_LABEL "LELT"
322 #define LN_PROLOG_AS_LABEL "LASLTP"
323 #define LN_PROLOG_END_LABEL "LELTP"
324 #define DIE_LABEL_PREFIX "DW"
325 \f
326 /* Match the base name of a file to the base name of a compilation unit. */
327
328 static int
329 matches_main_base (const char *path)
330 {
331 /* Cache the last query. */
332 static const char *last_path = NULL;
333 static int last_match = 0;
334 if (path != last_path)
335 {
336 const char *base;
337 int length = base_of_path (path, &base);
338 last_path = path;
339 last_match = (length == main_input_baselength
340 && memcmp (base, main_input_basename, length) == 0);
341 }
342 return last_match;
343 }
344
345 #ifdef DEBUG_DEBUG_STRUCT
346
347 static int
348 dump_struct_debug (tree type, enum debug_info_usage usage,
349 enum debug_struct_file criterion, int generic,
350 int matches, int result)
351 {
352 /* Find the type name. */
353 tree type_decl = TYPE_STUB_DECL (type);
354 tree t = type_decl;
355 const char *name = 0;
356 if (TREE_CODE (t) == TYPE_DECL)
357 t = DECL_NAME (t);
358 if (t)
359 name = IDENTIFIER_POINTER (t);
360
361 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
362 criterion,
363 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
364 matches ? "bas" : "hdr",
365 generic ? "gen" : "ord",
366 usage == DINFO_USAGE_DFN ? ";" :
367 usage == DINFO_USAGE_DIR_USE ? "." : "*",
368 result,
369 (void*) type_decl, name);
370 return result;
371 }
372 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
373 dump_struct_debug (type, usage, criterion, generic, matches, result)
374
375 #else
376
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 (result)
379
380 #endif
381
382 /* Get the number of HOST_WIDE_INTs needed to represent the precision
383 of the number. Some constants have a large uniform precision, so
384 we get the precision needed for the actual value of the number. */
385
386 static unsigned int
387 get_full_len (const wide_int &op)
388 {
389 int prec = wi::min_precision (op, UNSIGNED);
390 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
391 / HOST_BITS_PER_WIDE_INT);
392 }
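/* A worked example, assuming HOST_BITS_PER_WIDE_INT == 64: a constant
   whose minimum precision is 70 bits needs (70 + 63) / 64 == 2
   HOST_WIDE_INTs, while anything of 64 bits or fewer needs only 1.  */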
393
394 static bool
395 should_emit_struct_debug (tree type, enum debug_info_usage usage)
396 {
397 enum debug_struct_file criterion;
398 tree type_decl;
399 bool generic = lang_hooks.types.generic_p (type);
400
401 if (generic)
402 criterion = debug_struct_generic[usage];
403 else
404 criterion = debug_struct_ordinary[usage];
405
406 if (criterion == DINFO_STRUCT_FILE_NONE)
407 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
408 if (criterion == DINFO_STRUCT_FILE_ANY)
409 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
410
411 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
412
413 if (type_decl != NULL)
414 {
415 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
416 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
417
418 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
419 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
420 }
421
422 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
423 }
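/* A compact sketch of the decision implemented above, using the criteria
   named in the code:

     DINFO_STRUCT_FILE_NONE   never emit the struct's debug info
     DINFO_STRUCT_FILE_ANY    always emit it
     DINFO_STRUCT_FILE_SYS    emit it if the type is declared in a system
                              header, or in a file whose base name matches
                              the main compilation unit
     any other criterion      emit it only if the declaring file's base
                              name matches the main compilation unit  */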
424 \f
425 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
426 switch to the data section instead, and write out a synthetic start label
427 for collect2 the first time around. */
428
429 static void
430 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
431 {
432 if (eh_frame_section == 0)
433 {
434 int flags;
435
436 if (EH_TABLES_CAN_BE_READ_ONLY)
437 {
438 int fde_encoding;
439 int per_encoding;
440 int lsda_encoding;
441
442 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
443 /*global=*/0);
444 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
445 /*global=*/1);
446 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
447 /*global=*/0);
448 flags = ((! flag_pic
449 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
450 && (fde_encoding & 0x70) != DW_EH_PE_aligned
451 && (per_encoding & 0x70) != DW_EH_PE_absptr
452 && (per_encoding & 0x70) != DW_EH_PE_aligned
453 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
454 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
455 ? 0 : SECTION_WRITE);
456 }
457 else
458 flags = SECTION_WRITE;
459
460 #ifdef EH_FRAME_SECTION_NAME
461 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
462 #else
463 eh_frame_section = ((flags == SECTION_WRITE)
464 ? data_section : readonly_data_section);
465 #endif /* EH_FRAME_SECTION_NAME */
466 }
467
468 switch_to_section (eh_frame_section);
469
470 #ifdef EH_FRAME_THROUGH_COLLECT2
471 /* We have no special eh_frame section. Emit special labels to guide
472 collect2. */
473 if (!back)
474 {
475 tree label = get_file_function_name ("F");
476 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
477 targetm.asm_out.globalize_label (asm_out_file,
478 IDENTIFIER_POINTER (label));
479 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
480 }
481 #endif
482 }
483
484 /* Switch [BACK] to the eh or debug frame table section, depending on
485 FOR_EH. */
486
487 static void
488 switch_to_frame_table_section (int for_eh, bool back)
489 {
490 if (for_eh)
491 switch_to_eh_frame_section (back);
492 else
493 {
494 if (!debug_frame_section)
495 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
496 SECTION_DEBUG, NULL);
497 switch_to_section (debug_frame_section);
498 }
499 }
500
501 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
502
503 enum dw_cfi_oprnd_type
504 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
505 {
506 switch (cfi)
507 {
508 case DW_CFA_nop:
509 case DW_CFA_GNU_window_save:
510 case DW_CFA_remember_state:
511 case DW_CFA_restore_state:
512 return dw_cfi_oprnd_unused;
513
514 case DW_CFA_set_loc:
515 case DW_CFA_advance_loc1:
516 case DW_CFA_advance_loc2:
517 case DW_CFA_advance_loc4:
518 case DW_CFA_MIPS_advance_loc8:
519 return dw_cfi_oprnd_addr;
520
521 case DW_CFA_offset:
522 case DW_CFA_offset_extended:
523 case DW_CFA_def_cfa:
524 case DW_CFA_offset_extended_sf:
525 case DW_CFA_def_cfa_sf:
526 case DW_CFA_restore:
527 case DW_CFA_restore_extended:
528 case DW_CFA_undefined:
529 case DW_CFA_same_value:
530 case DW_CFA_def_cfa_register:
531 case DW_CFA_register:
532 case DW_CFA_expression:
533 case DW_CFA_val_expression:
534 return dw_cfi_oprnd_reg_num;
535
536 case DW_CFA_def_cfa_offset:
537 case DW_CFA_GNU_args_size:
538 case DW_CFA_def_cfa_offset_sf:
539 return dw_cfi_oprnd_offset;
540
541 case DW_CFA_def_cfa_expression:
542 return dw_cfi_oprnd_loc;
543
544 default:
545 gcc_unreachable ();
546 }
547 }
548
549 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
550
551 enum dw_cfi_oprnd_type
552 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
553 {
554 switch (cfi)
555 {
556 case DW_CFA_def_cfa:
557 case DW_CFA_def_cfa_sf:
558 case DW_CFA_offset:
559 case DW_CFA_offset_extended_sf:
560 case DW_CFA_offset_extended:
561 return dw_cfi_oprnd_offset;
562
563 case DW_CFA_register:
564 return dw_cfi_oprnd_reg_num;
565
566 case DW_CFA_expression:
567 case DW_CFA_val_expression:
568 return dw_cfi_oprnd_loc;
569
570 case DW_CFA_def_cfa_expression:
571 return dw_cfi_oprnd_cfa_loc;
572
573 default:
574 return dw_cfi_oprnd_unused;
575 }
576 }
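/* Reading the two descriptors above together, for example: DW_CFA_offset
   carries a register number in operand 1 and an offset in operand 2,
   DW_CFA_def_cfa_expression carries a location expression in operand 1
   and a CFA location in operand 2, and DW_CFA_nop uses neither operand.  */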
577
578 /* Output one FDE. */
579
580 static void
581 output_fde (dw_fde_ref fde, bool for_eh, bool second,
582 char *section_start_label, int fde_encoding, char *augmentation,
583 bool any_lsda_needed, int lsda_encoding)
584 {
585 const char *begin, *end;
586 static unsigned int j;
587 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
588
589 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
590 /* empty */ 0);
591 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
592 for_eh + j);
593 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
594 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
595 if (!XCOFF_DEBUGGING_INFO || for_eh)
596 {
597 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
598 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
599 " indicating 64-bit DWARF extension");
600 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
601 "FDE Length");
602 }
603 ASM_OUTPUT_LABEL (asm_out_file, l1);
604
605 if (for_eh)
606 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
607 else
608 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
609 debug_frame_section, "FDE CIE offset");
610
611 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
612 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
613
614 if (for_eh)
615 {
616 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
617 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
618 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
619 "FDE initial location");
620 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
621 end, begin, "FDE address range");
622 }
623 else
624 {
625 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
626 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
627 }
628
629 if (augmentation[0])
630 {
631 if (any_lsda_needed)
632 {
633 int size = size_of_encoded_value (lsda_encoding);
634
635 if (lsda_encoding == DW_EH_PE_aligned)
636 {
637 int offset = ( 4 /* Length */
638 + 4 /* CIE offset */
639 + 2 * size_of_encoded_value (fde_encoding)
640 + 1 /* Augmentation size */ );
641 int pad = -offset & (PTR_SIZE - 1);
642
643 size += pad;
644 gcc_assert (size_of_uleb128 (size) == 1);
645 }
646
647 dw2_asm_output_data_uleb128 (size, "Augmentation size");
648
649 if (fde->uses_eh_lsda)
650 {
651 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
652 fde->funcdef_number);
653 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
654 gen_rtx_SYMBOL_REF (Pmode, l1),
655 false,
656 "Language Specific Data Area");
657 }
658 else
659 {
660 if (lsda_encoding == DW_EH_PE_aligned)
661 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
662 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
663 "Language Specific Data Area (none)");
664 }
665 }
666 else
667 dw2_asm_output_data_uleb128 (0, "Augmentation size");
668 }
669
670 /* Loop through the Call Frame Instructions associated with this FDE. */
671 fde->dw_fde_current_label = begin;
672 {
673 size_t from, until, i;
674
675 from = 0;
676 until = vec_safe_length (fde->dw_fde_cfi);
677
678 if (fde->dw_fde_second_begin == NULL)
679 ;
680 else if (!second)
681 until = fde->dw_fde_switch_cfi_index;
682 else
683 from = fde->dw_fde_switch_cfi_index;
684
685 for (i = from; i < until; i++)
686 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
687 }
688
689 /* If we are to emit a ref/link from function bodies to their frame tables,
690 do it now. This is typically performed to make sure that tables
691 associated with functions are dragged with them and not discarded by
692 garbage-collecting links. We need to do this on a per-function basis to
693 cope with -ffunction-sections. */
694
695 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
696 /* Switch to the function section, emit the ref to the tables, and
697 switch *back* into the table section. */
698 switch_to_section (function_section (fde->decl));
699 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
700 switch_to_frame_table_section (for_eh, true);
701 #endif
702
703 /* Pad the FDE out to an address sized boundary. */
704 ASM_OUTPUT_ALIGN (asm_out_file,
705 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
706 ASM_OUTPUT_LABEL (asm_out_file, l2);
707
708 j += 2;
709 }
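/* A rough sketch of the byte layout output_fde produces; exact field sizes
   depend on FOR_EH, DWARF_OFFSET_SIZE and the chosen encodings:

     [ length ]             delta l1..l2, preceded by the 0xffffffff escape
                            for 64-bit DWARF .debug_frame
     [ CIE pointer ]        delta to (EH) or offset of (debug) the section
                            start label
     [ initial location ]   begin label, encoded with fde_encoding for EH
     [ address range ]      end - begin
     [ augmentation data ]  uleb128 size plus LSDA pointer, only when the
                            CIE augmentation string is non-empty
     [ call frame instructions ... ]
     [ padding to an address-sized boundary ]  */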
710
711 /* Return true if frame description entry FDE is needed for EH. */
712
713 static bool
714 fde_needed_for_eh_p (dw_fde_ref fde)
715 {
716 if (flag_asynchronous_unwind_tables)
717 return true;
718
719 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
720 return true;
721
722 if (fde->uses_eh_lsda)
723 return true;
724
725 /* If exceptions are enabled, we have collected nothrow info. */
726 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
727 return false;
728
729 return true;
730 }
731
732 /* Output the call frame information used to record information
733 that relates to calculating the frame pointer, and records the
734 location of saved registers. */
735
736 static void
737 output_call_frame_info (int for_eh)
738 {
739 unsigned int i;
740 dw_fde_ref fde;
741 dw_cfi_ref cfi;
742 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
743 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
744 bool any_lsda_needed = false;
745 char augmentation[6];
746 int augmentation_size;
747 int fde_encoding = DW_EH_PE_absptr;
748 int per_encoding = DW_EH_PE_absptr;
749 int lsda_encoding = DW_EH_PE_absptr;
750 int return_reg;
751 rtx personality = NULL;
752 int dw_cie_version;
753
754 /* Don't emit a CIE if there won't be any FDEs. */
755 if (!fde_vec)
756 return;
757
758 /* Nothing to do if the assembler's doing it all. */
759 if (dwarf2out_do_cfi_asm ())
760 return;
761
762 /* If we don't have any functions we'll want to unwind out of, don't emit
763 any EH unwind information. If we make FDEs linkonce, we may have to
764 emit an empty label for an FDE that wouldn't otherwise be emitted. We
765 want to avoid having an FDE kept around when the function it refers to
766 is discarded. Example where this matters: a primary function template
767 in C++ requires EH information, an explicit specialization doesn't. */
768 if (for_eh)
769 {
770 bool any_eh_needed = false;
771
772 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
773 {
774 if (fde->uses_eh_lsda)
775 any_eh_needed = any_lsda_needed = true;
776 else if (fde_needed_for_eh_p (fde))
777 any_eh_needed = true;
778 else if (TARGET_USES_WEAK_UNWIND_INFO)
779 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
780 }
781
782 if (!any_eh_needed)
783 return;
784 }
785
786 /* We're going to be generating comments, so turn on app. */
787 if (flag_debug_asm)
788 app_enable ();
789
790 /* Switch to the proper frame section, first time. */
791 switch_to_frame_table_section (for_eh, false);
792
793 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
794 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
795
796 /* Output the CIE. */
797 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
798 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
799 if (!XCOFF_DEBUGGING_INFO || for_eh)
800 {
801 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
802 dw2_asm_output_data (4, 0xffffffff,
803 "Initial length escape value indicating 64-bit DWARF extension");
804 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
805 "Length of Common Information Entry");
806 }
807 ASM_OUTPUT_LABEL (asm_out_file, l1);
808
809 /* Now that the CIE pointer is PC-relative for EH,
810 use 0 to identify the CIE. */
811 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
812 (for_eh ? 0 : DWARF_CIE_ID),
813 "CIE Identifier Tag");
814
815 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
816 use CIE version 1, unless that would produce incorrect results
817 due to overflowing the return register column. */
818 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
819 dw_cie_version = 1;
820 if (return_reg >= 256 || dwarf_version > 2)
821 dw_cie_version = 3;
822 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
823
824 augmentation[0] = 0;
825 augmentation_size = 0;
826
827 personality = current_unit_personality;
828 if (for_eh)
829 {
830 char *p;
831
832 /* Augmentation:
833 z Indicates that a uleb128 is present to size the
834 augmentation section.
835 L Indicates the encoding (and thus presence) of
836 an LSDA pointer in the FDE augmentation.
837 R Indicates a non-default pointer encoding for
838 FDE code pointers.
839 P Indicates the presence of an encoding + language
840 personality routine in the CIE augmentation. */
841
842 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
843 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
844 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
845
846 p = augmentation + 1;
847 if (personality)
848 {
849 *p++ = 'P';
850 augmentation_size += 1 + size_of_encoded_value (per_encoding);
851 assemble_external_libcall (personality);
852 }
853 if (any_lsda_needed)
854 {
855 *p++ = 'L';
856 augmentation_size += 1;
857 }
858 if (fde_encoding != DW_EH_PE_absptr)
859 {
860 *p++ = 'R';
861 augmentation_size += 1;
862 }
863 if (p > augmentation + 1)
864 {
865 augmentation[0] = 'z';
866 *p = '\0';
867 }
868
869 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
870 if (personality && per_encoding == DW_EH_PE_aligned)
871 {
872 int offset = ( 4 /* Length */
873 + 4 /* CIE Id */
874 + 1 /* CIE version */
875 + strlen (augmentation) + 1 /* Augmentation */
876 + size_of_uleb128 (1) /* Code alignment */
877 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
878 + 1 /* RA column */
879 + 1 /* Augmentation size */
880 + 1 /* Personality encoding */ );
881 int pad = -offset & (PTR_SIZE - 1);
882
883 augmentation_size += pad;
884
885 /* Augmentations should be small, so there's scarce need to
886 iterate for a solution. Die if we exceed one uleb128 byte. */
887 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
888 }
889 }
890
891 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
892 if (dw_cie_version >= 4)
893 {
894 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
895 dw2_asm_output_data (1, 0, "CIE Segment Size");
896 }
897 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
898 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
899 "CIE Data Alignment Factor");
900
901 if (dw_cie_version == 1)
902 dw2_asm_output_data (1, return_reg, "CIE RA Column");
903 else
904 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
905
906 if (augmentation[0])
907 {
908 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
909 if (personality)
910 {
911 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
912 eh_data_format_name (per_encoding));
913 dw2_asm_output_encoded_addr_rtx (per_encoding,
914 personality,
915 true, NULL);
916 }
917
918 if (any_lsda_needed)
919 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
920 eh_data_format_name (lsda_encoding));
921
922 if (fde_encoding != DW_EH_PE_absptr)
923 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
924 eh_data_format_name (fde_encoding));
925 }
926
927 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
928 output_cfi (cfi, NULL, for_eh);
929
930 /* Pad the CIE out to an address sized boundary. */
931 ASM_OUTPUT_ALIGN (asm_out_file,
932 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
933 ASM_OUTPUT_LABEL (asm_out_file, l2);
934
935 /* Loop through all of the FDEs. */
936 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
937 {
938 unsigned int k;
939
940 /* Don't emit EH unwind info for leaf functions that don't need it. */
941 if (for_eh && !fde_needed_for_eh_p (fde))
942 continue;
943
944 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
945 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
946 augmentation, any_lsda_needed, lsda_encoding);
947 }
948
949 if (for_eh && targetm.terminate_dw2_eh_frame_info)
950 dw2_asm_output_data (4, 0, "End of Table");
951
952 /* Turn off app to make assembly quicker. */
953 if (flag_debug_asm)
954 app_disable ();
955 }
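/* Likewise, a rough sketch of the CIE emitted above, before the FDEs:

     [ length ][ CIE id: 0 for EH, DWARF_CIE_ID otherwise ][ version ]
     [ augmentation string, e.g. "zPLR" or "" ]
     [ address size ][ segment size ]         (CIE version 4 only)
     [ code alignment: uleb128 1 ][ data alignment: sleb128 ]
     [ return address column ]
     [ augmentation data: size, personality, LSDA and FDE encodings ]
     [ initial call frame instructions from cie_cfi_vec ]
     [ padding to an address-sized boundary ]  */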
956
957 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
958
959 static void
960 dwarf2out_do_cfi_startproc (bool second)
961 {
962 int enc;
963 rtx ref;
964
965 fprintf (asm_out_file, "\t.cfi_startproc\n");
966
967 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
968 eh unwinders. */
969 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
970 return;
971
972 rtx personality = get_personality_function (current_function_decl);
973
974 if (personality)
975 {
976 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
977 ref = personality;
978
979 /* ??? The GAS support isn't entirely consistent. We have to
980 handle indirect support ourselves, but PC-relative is done
981 in the assembler. Further, the assembler can't handle any
982 of the weirder relocation types. */
983 if (enc & DW_EH_PE_indirect)
984 ref = dw2_force_const_mem (ref, true);
985
986 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
987 output_addr_const (asm_out_file, ref);
988 fputc ('\n', asm_out_file);
989 }
990
991 if (crtl->uses_eh_lsda)
992 {
993 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
994
995 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
996 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
997 current_function_funcdef_no);
998 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
999 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1000
1001 if (enc & DW_EH_PE_indirect)
1002 ref = dw2_force_const_mem (ref, true);
1003
1004 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1005 output_addr_const (asm_out_file, ref);
1006 fputc ('\n', asm_out_file);
1007 }
1008 }
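/* As an illustration only (the encoding bytes and symbol names below are
   hypothetical, typical-looking ELF values; the real ones come from
   ASM_PREFERRED_EH_DATA_FORMAT and the generated labels), the directives
   printed here look roughly like:

     .cfi_startproc
     .cfi_personality 0x9b,DW.ref.__gxx_personality_v0
     .cfi_lsda 0x1b,.LLSDA0

   with the personality reference routed through dw2_force_const_mem when
   its encoding has DW_EH_PE_indirect set.  */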
1009
1010 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1011 this allocation may be done before pass_final. */
1012
1013 dw_fde_ref
1014 dwarf2out_alloc_current_fde (void)
1015 {
1016 dw_fde_ref fde;
1017
1018 fde = ggc_cleared_alloc<dw_fde_node> ();
1019 fde->decl = current_function_decl;
1020 fde->funcdef_number = current_function_funcdef_no;
1021 fde->fde_index = vec_safe_length (fde_vec);
1022 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1023 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1024 fde->nothrow = crtl->nothrow;
1025 fde->drap_reg = INVALID_REGNUM;
1026 fde->vdrap_reg = INVALID_REGNUM;
1027
1028 /* Record the FDE associated with this function. */
1029 cfun->fde = fde;
1030 vec_safe_push (fde_vec, fde);
1031
1032 return fde;
1033 }
1034
1035 /* Output a marker (i.e. a label) for the beginning of a function, before
1036 the prologue. */
1037
1038 void
1039 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1040 unsigned int column ATTRIBUTE_UNUSED,
1041 const char *file ATTRIBUTE_UNUSED)
1042 {
1043 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1044 char * dup_label;
1045 dw_fde_ref fde;
1046 section *fnsec;
1047 bool do_frame;
1048
1049 current_function_func_begin_label = NULL;
1050
1051 do_frame = dwarf2out_do_frame ();
1052
1053 /* ??? current_function_func_begin_label is also used by except.c for
1054 call-site information. We must emit this label if it might be used. */
1055 if (!do_frame
1056 && (!flag_exceptions
1057 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1058 return;
1059
1060 fnsec = function_section (current_function_decl);
1061 switch_to_section (fnsec);
1062 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1063 current_function_funcdef_no);
1064 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1065 current_function_funcdef_no);
1066 dup_label = xstrdup (label);
1067 current_function_func_begin_label = dup_label;
1068
1069 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1070 if (!do_frame)
1071 return;
1072
1073 /* Unlike the debug version, the EH version of frame unwind info is a per-
1074 function setting so we need to record whether we need it for the unit. */
1075 do_eh_frame |= dwarf2out_do_eh_frame ();
1076
1077 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1078 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1079 would include pass_dwarf2_frame. If we've not created the FDE yet,
1080 do so now. */
1081 fde = cfun->fde;
1082 if (fde == NULL)
1083 fde = dwarf2out_alloc_current_fde ();
1084
1085 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1086 fde->dw_fde_begin = dup_label;
1087 fde->dw_fde_current_label = dup_label;
1088 fde->in_std_section = (fnsec == text_section
1089 || (cold_text_section && fnsec == cold_text_section));
1090
1091 /* We only want to output line number information for the genuine dwarf2
1092 prologue case, not the eh frame case. */
1093 #ifdef DWARF2_DEBUGGING_INFO
1094 if (file)
1095 dwarf2out_source_line (line, column, file, 0, true);
1096 #endif
1097
1098 if (dwarf2out_do_cfi_asm ())
1099 dwarf2out_do_cfi_startproc (false);
1100 else
1101 {
1102 rtx personality = get_personality_function (current_function_decl);
1103 if (!current_unit_personality)
1104 current_unit_personality = personality;
1105
1106 /* We cannot keep a current personality per function as without CFI
1107 asm, at the point where we emit the CFI data, there is no current
1108 function anymore. */
1109 if (personality && current_unit_personality != personality)
1110 sorry ("multiple EH personalities are supported only with assemblers "
1111 "supporting .cfi_personality directive");
1112 }
1113 }
1114
1115 /* Output a marker (i.e. a label) for the end of the generated code
1116 for a function prologue. This gets called *after* the prologue code has
1117 been generated. */
1118
1119 void
1120 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1121 const char *file ATTRIBUTE_UNUSED)
1122 {
1123 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1124
1125 /* Output a label to mark the endpoint of the code generated for this
1126 function. */
1127 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1128 current_function_funcdef_no);
1129 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1130 current_function_funcdef_no);
1131 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1132 }
1133
1134 /* Output a marker (i.e. a label) for the beginning of the generated code
1135 for a function epilogue. This gets called *before* the epilogue code has
1136 been generated. */
1137
1138 void
1139 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1140 const char *file ATTRIBUTE_UNUSED)
1141 {
1142 dw_fde_ref fde = cfun->fde;
1143 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1144
1145 if (fde->dw_fde_vms_begin_epilogue)
1146 return;
1147
1148 /* Output a label to mark the endpoint of the code generated for this
1149 function. */
1150 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1151 current_function_funcdef_no);
1152 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1153 current_function_funcdef_no);
1154 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1155 }
1156
1157 /* Output a marker (i.e. a label) for the absolute end of the generated code
1158 for a function definition. This gets called *after* the epilogue code has
1159 been generated. */
1160
1161 void
1162 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1163 const char *file ATTRIBUTE_UNUSED)
1164 {
1165 dw_fde_ref fde;
1166 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1167
1168 last_var_location_insn = NULL;
1169 cached_next_real_insn = NULL;
1170
1171 if (dwarf2out_do_cfi_asm ())
1172 fprintf (asm_out_file, "\t.cfi_endproc\n");
1173
1174 /* Output a label to mark the endpoint of the code generated for this
1175 function. */
1176 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1177 current_function_funcdef_no);
1178 ASM_OUTPUT_LABEL (asm_out_file, label);
1179 fde = cfun->fde;
1180 gcc_assert (fde != NULL);
1181 if (fde->dw_fde_second_begin == NULL)
1182 fde->dw_fde_end = xstrdup (label);
1183 }
1184
1185 void
1186 dwarf2out_frame_finish (void)
1187 {
1188 /* Output call frame information. */
1189 if (targetm.debug_unwind_info () == UI_DWARF2)
1190 output_call_frame_info (0);
1191
1192 /* Output another copy for the unwinder. */
1193 if (do_eh_frame)
1194 output_call_frame_info (1);
1195 }
1196
1197 /* Note that the current function section is being used for code. */
1198
1199 static void
1200 dwarf2out_note_section_used (void)
1201 {
1202 section *sec = current_function_section ();
1203 if (sec == text_section)
1204 text_section_used = true;
1205 else if (sec == cold_text_section)
1206 cold_text_section_used = true;
1207 }
1208
1209 static void var_location_switch_text_section (void);
1210 static void set_cur_line_info_table (section *);
1211
1212 void
1213 dwarf2out_switch_text_section (void)
1214 {
1215 section *sect;
1216 dw_fde_ref fde = cfun->fde;
1217
1218 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1219
1220 if (!in_cold_section_p)
1221 {
1222 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1223 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1224 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1225 }
1226 else
1227 {
1228 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1229 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1230 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1231 }
1232 have_multiple_function_sections = true;
1233
1234 /* There is no need to mark used sections when not debugging. */
1235 if (cold_text_section != NULL)
1236 dwarf2out_note_section_used ();
1237
1238 if (dwarf2out_do_cfi_asm ())
1239 fprintf (asm_out_file, "\t.cfi_endproc\n");
1240
1241 /* Now do the real section switch. */
1242 sect = current_function_section ();
1243 switch_to_section (sect);
1244
1245 fde->second_in_std_section
1246 = (sect == text_section
1247 || (cold_text_section && sect == cold_text_section));
1248
1249 if (dwarf2out_do_cfi_asm ())
1250 dwarf2out_do_cfi_startproc (true);
1251
1252 var_location_switch_text_section ();
1253
1254 if (cold_text_section != NULL)
1255 set_cur_line_info_table (sect);
1256 }
1257 \f
1258 /* And now, the subset of the debugging information support code necessary
1259 for emitting location expressions. */
1260
1261 /* Data about a single source file. */
1262 struct GTY((for_user)) dwarf_file_data {
1263 const char * filename;
1264 int emitted_number;
1265 };
1266
1267 /* Describe an entry into the .debug_addr section. */
1268
1269 enum ate_kind {
1270 ate_kind_rtx,
1271 ate_kind_rtx_dtprel,
1272 ate_kind_label
1273 };
1274
1275 struct GTY((for_user)) addr_table_entry {
1276 enum ate_kind kind;
1277 unsigned int refcount;
1278 unsigned int index;
1279 union addr_table_entry_struct_union
1280 {
1281 rtx GTY ((tag ("0"))) rtl;
1282 char * GTY ((tag ("1"))) label;
1283 }
1284 GTY ((desc ("%1.kind"))) addr;
1285 };
1286
1287 typedef unsigned int var_loc_view;
1288
1289 /* Location lists are ranges + location descriptions for that range,
1290 so you can track variables that are in different places over
1291 their entire life. */
1292 typedef struct GTY(()) dw_loc_list_struct {
1293 dw_loc_list_ref dw_loc_next;
1294 const char *begin; /* Label and addr_entry for start of range */
1295 addr_table_entry *begin_entry;
1296 const char *end; /* Label for end of range */
1297 char *ll_symbol; /* Label for beginning of location list.
1298 Only on head of list. */
1299 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1300 const char *section; /* Section this loclist is relative to */
1301 dw_loc_descr_ref expr;
1302 var_loc_view vbegin, vend;
1303 hashval_t hash;
1304 /* True if all addresses in this and subsequent lists are known to be
1305 resolved. */
1306 bool resolved_addr;
1307 /* True if this list has been replaced by dw_loc_next. */
1308 bool replaced;
1309 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1310 section. */
1311 unsigned char emitted : 1;
1312 /* True if hash field is index rather than hash value. */
1313 unsigned char num_assigned : 1;
1314 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1315 unsigned char offset_emitted : 1;
1316 /* True if note_variable_value_in_expr has been called on it. */
1317 unsigned char noted_variable_value : 1;
1318 /* True if the range should be emitted even if begin and end
1319 are the same. */
1320 bool force;
1321 } dw_loc_list_node;
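/* Conceptually, a chain of these nodes describes where a variable lives
   over several address ranges, e.g. (labels hypothetical):

     [.LVL0, .LVL1)  DW_OP_reg3
     [.LVL1, .LVL2)  DW_OP_fbreg -24

   Each range is one dw_loc_list_node; ll_symbol and vl_symbol are set only
   on the head of the chain.  */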
1322
1323 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1324 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1325
1326 /* Convert a DWARF stack opcode into its string name. */
1327
1328 static const char *
1329 dwarf_stack_op_name (unsigned int op)
1330 {
1331 const char *name = get_DW_OP_name (op);
1332
1333 if (name != NULL)
1334 return name;
1335
1336 return "OP_<unknown>";
1337 }
1338
1339 /* Return TRUE iff we're to output location view lists as a separate
1340 attribute next to the location lists, as an extension compatible
1341 with DWARF 2 and above. */
1342
1343 static inline bool
1344 dwarf2out_locviews_in_attribute ()
1345 {
1346 return debug_variable_location_views == 1;
1347 }
1348
1349 /* Return TRUE iff we're to output location view lists as part of the
1350 location lists, as proposed for standardization after DWARF 5. */
1351
1352 static inline bool
1353 dwarf2out_locviews_in_loclist ()
1354 {
1355 #ifndef DW_LLE_view_pair
1356 return false;
1357 #else
1358 return debug_variable_location_views == -1;
1359 #endif
1360 }
1361
1362 /* Return a pointer to a newly allocated location description. Location
1363 descriptions are simple expression terms that can be strung
1364 together to form more complicated location (address) descriptions. */
1365
1366 static inline dw_loc_descr_ref
1367 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1368 unsigned HOST_WIDE_INT oprnd2)
1369 {
1370 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1371
1372 descr->dw_loc_opc = op;
1373 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1374 descr->dw_loc_oprnd1.val_entry = NULL;
1375 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1376 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1377 descr->dw_loc_oprnd2.val_entry = NULL;
1378 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1379
1380 return descr;
1381 }
1382
1383 /* Add a location description term to a location description expression. */
1384
1385 static inline void
1386 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1387 {
1388 dw_loc_descr_ref *d;
1389
1390 /* Find the end of the chain. */
1391 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1392 ;
1393
1394 *d = descr;
1395 }
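/* A minimal usage sketch of the two helpers above (not taken from the
   surrounding code): build the two-term expression "DW_OP_breg6 16;
   DW_OP_deref", i.e. dereference the address reg6 + 16:

     dw_loc_descr_ref expr = new_loc_descr (DW_OP_breg6, 16, 0);
     add_loc_descr (&expr, new_loc_descr (DW_OP_deref, 0, 0));  */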
1396
1397 /* Compare two location operands for exact equality. */
1398
1399 static bool
1400 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1401 {
1402 if (a->val_class != b->val_class)
1403 return false;
1404 switch (a->val_class)
1405 {
1406 case dw_val_class_none:
1407 return true;
1408 case dw_val_class_addr:
1409 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1410
1411 case dw_val_class_offset:
1412 case dw_val_class_unsigned_const:
1413 case dw_val_class_const:
1414 case dw_val_class_unsigned_const_implicit:
1415 case dw_val_class_const_implicit:
1416 case dw_val_class_range_list:
1417 /* These are all HOST_WIDE_INT, signed or unsigned. */
1418 return a->v.val_unsigned == b->v.val_unsigned;
1419
1420 case dw_val_class_loc:
1421 return a->v.val_loc == b->v.val_loc;
1422 case dw_val_class_loc_list:
1423 return a->v.val_loc_list == b->v.val_loc_list;
1424 case dw_val_class_view_list:
1425 return a->v.val_view_list == b->v.val_view_list;
1426 case dw_val_class_die_ref:
1427 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1428 case dw_val_class_fde_ref:
1429 return a->v.val_fde_index == b->v.val_fde_index;
1430 case dw_val_class_symview:
1431 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1432 case dw_val_class_lbl_id:
1433 case dw_val_class_lineptr:
1434 case dw_val_class_macptr:
1435 case dw_val_class_loclistsptr:
1436 case dw_val_class_high_pc:
1437 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1438 case dw_val_class_str:
1439 return a->v.val_str == b->v.val_str;
1440 case dw_val_class_flag:
1441 return a->v.val_flag == b->v.val_flag;
1442 case dw_val_class_file:
1443 case dw_val_class_file_implicit:
1444 return a->v.val_file == b->v.val_file;
1445 case dw_val_class_decl_ref:
1446 return a->v.val_decl_ref == b->v.val_decl_ref;
1447
1448 case dw_val_class_const_double:
1449 return (a->v.val_double.high == b->v.val_double.high
1450 && a->v.val_double.low == b->v.val_double.low);
1451
1452 case dw_val_class_wide_int:
1453 return *a->v.val_wide == *b->v.val_wide;
1454
1455 case dw_val_class_vec:
1456 {
1457 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1458 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1459
1460 return (a_len == b_len
1461 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1462 }
1463
1464 case dw_val_class_data8:
1465 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1466
1467 case dw_val_class_vms_delta:
1468 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1469 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1470
1471 case dw_val_class_discr_value:
1472 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1473 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1474 case dw_val_class_discr_list:
1475 /* It makes no sense comparing two discriminant value lists. */
1476 return false;
1477 }
1478 gcc_unreachable ();
1479 }
1480
1481 /* Compare two location atoms for exact equality. */
1482
1483 static bool
1484 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1485 {
1486 if (a->dw_loc_opc != b->dw_loc_opc)
1487 return false;
1488
1489 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1490 address size, but since we always allocate cleared storage it
1491 should be zero for other types of locations. */
1492 if (a->dtprel != b->dtprel)
1493 return false;
1494
1495 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1496 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1497 }
1498
1499 /* Compare two complete location expressions for exact equality. */
1500
1501 bool
1502 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1503 {
1504 while (1)
1505 {
1506 if (a == b)
1507 return true;
1508 if (a == NULL || b == NULL)
1509 return false;
1510 if (!loc_descr_equal_p_1 (a, b))
1511 return false;
1512
1513 a = a->dw_loc_next;
1514 b = b->dw_loc_next;
1515 }
1516 }
1517
1518
1519 /* Add a constant POLY_OFFSET to a location expression. */
1520
1521 static void
1522 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1523 {
1524 dw_loc_descr_ref loc;
1525 HOST_WIDE_INT *p;
1526
1527 gcc_assert (*list_head != NULL);
1528
1529 if (known_eq (poly_offset, 0))
1530 return;
1531
1532 /* Find the end of the chain. */
1533 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1534 ;
1535
1536 HOST_WIDE_INT offset;
1537 if (!poly_offset.is_constant (&offset))
1538 {
1539 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1540 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1541 return;
1542 }
1543
1544 p = NULL;
1545 if (loc->dw_loc_opc == DW_OP_fbreg
1546 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1547 p = &loc->dw_loc_oprnd1.v.val_int;
1548 else if (loc->dw_loc_opc == DW_OP_bregx)
1549 p = &loc->dw_loc_oprnd2.v.val_int;
1550
1551 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1552 offset. Don't optimize if a signed integer overflow would happen. */
1553 if (p != NULL
1554 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1555 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1556 *p += offset;
1557
1558 else if (offset > 0)
1559 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1560
1561 else
1562 {
1563 loc->dw_loc_next
1564 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1565 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1566 }
1567 }
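/* Two quick examples of the folding above: if the expression currently
   ends in "DW_OP_fbreg -24", adding 8 rewrites that term to
   "DW_OP_fbreg -16"; if it ends in DW_OP_deref, adding 8 instead appends
   "DW_OP_plus_uconst 8" (a negative offset appends a constant followed by
   DW_OP_minus).  */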
1568
1569 /* Return a pointer to a newly allocated location description for
1570 REG and OFFSET. */
1571
1572 static inline dw_loc_descr_ref
1573 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1574 {
1575 HOST_WIDE_INT const_offset;
1576 if (offset.is_constant (&const_offset))
1577 {
1578 if (reg <= 31)
1579 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1580 const_offset, 0);
1581 else
1582 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1583 }
1584 else
1585 {
1586 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1587 loc_descr_plus_const (&ret, offset);
1588 return ret;
1589 }
1590 }
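/* For instance, register 3 with constant offset 8 becomes the single term
   "DW_OP_breg3 8", while register 40 (outside the breg0..breg31 range)
   becomes "DW_OP_bregx 40, 8"; a non-constant poly_int offset falls back
   to loc_descr_plus_const applied to a zero-offset base.  */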
1591
1592 /* Add a constant OFFSET to a location list. */
1593
1594 static void
1595 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1596 {
1597 dw_loc_list_ref d;
1598 for (d = list_head; d != NULL; d = d->dw_loc_next)
1599 loc_descr_plus_const (&d->expr, offset);
1600 }
1601
1602 #define DWARF_REF_SIZE \
1603 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1604
1605 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1606 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1607 DW_FORM_data16 with 128 bits. */
1608 #define DWARF_LARGEST_DATA_FORM_BITS \
1609 (dwarf_version >= 5 ? 128 : 64)
1610
1611 /* Utility inline function for construction of ops that were GNU extensions
1612 before DWARF 5. */
1613 static inline enum dwarf_location_atom
1614 dwarf_OP (enum dwarf_location_atom op)
1615 {
1616 switch (op)
1617 {
1618 case DW_OP_implicit_pointer:
1619 if (dwarf_version < 5)
1620 return DW_OP_GNU_implicit_pointer;
1621 break;
1622
1623 case DW_OP_entry_value:
1624 if (dwarf_version < 5)
1625 return DW_OP_GNU_entry_value;
1626 break;
1627
1628 case DW_OP_const_type:
1629 if (dwarf_version < 5)
1630 return DW_OP_GNU_const_type;
1631 break;
1632
1633 case DW_OP_regval_type:
1634 if (dwarf_version < 5)
1635 return DW_OP_GNU_regval_type;
1636 break;
1637
1638 case DW_OP_deref_type:
1639 if (dwarf_version < 5)
1640 return DW_OP_GNU_deref_type;
1641 break;
1642
1643 case DW_OP_convert:
1644 if (dwarf_version < 5)
1645 return DW_OP_GNU_convert;
1646 break;
1647
1648 case DW_OP_reinterpret:
1649 if (dwarf_version < 5)
1650 return DW_OP_GNU_reinterpret;
1651 break;
1652
1653 case DW_OP_addrx:
1654 if (dwarf_version < 5)
1655 return DW_OP_GNU_addr_index;
1656 break;
1657
1658 case DW_OP_constx:
1659 if (dwarf_version < 5)
1660 return DW_OP_GNU_const_index;
1661 break;
1662
1663 default:
1664 break;
1665 }
1666 return op;
1667 }
1668
1669 /* Similarly for attributes. */
1670 static inline enum dwarf_attribute
1671 dwarf_AT (enum dwarf_attribute at)
1672 {
1673 switch (at)
1674 {
1675 case DW_AT_call_return_pc:
1676 if (dwarf_version < 5)
1677 return DW_AT_low_pc;
1678 break;
1679
1680 case DW_AT_call_tail_call:
1681 if (dwarf_version < 5)
1682 return DW_AT_GNU_tail_call;
1683 break;
1684
1685 case DW_AT_call_origin:
1686 if (dwarf_version < 5)
1687 return DW_AT_abstract_origin;
1688 break;
1689
1690 case DW_AT_call_target:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_call_site_target;
1693 break;
1694
1695 case DW_AT_call_target_clobbered:
1696 if (dwarf_version < 5)
1697 return DW_AT_GNU_call_site_target_clobbered;
1698 break;
1699
1700 case DW_AT_call_parameter:
1701 if (dwarf_version < 5)
1702 return DW_AT_abstract_origin;
1703 break;
1704
1705 case DW_AT_call_value:
1706 if (dwarf_version < 5)
1707 return DW_AT_GNU_call_site_value;
1708 break;
1709
1710 case DW_AT_call_data_value:
1711 if (dwarf_version < 5)
1712 return DW_AT_GNU_call_site_data_value;
1713 break;
1714
1715 case DW_AT_call_all_calls:
1716 if (dwarf_version < 5)
1717 return DW_AT_GNU_all_call_sites;
1718 break;
1719
1720 case DW_AT_call_all_tail_calls:
1721 if (dwarf_version < 5)
1722 return DW_AT_GNU_all_tail_call_sites;
1723 break;
1724
1725 case DW_AT_dwo_name:
1726 if (dwarf_version < 5)
1727 return DW_AT_GNU_dwo_name;
1728 break;
1729
1730 case DW_AT_addr_base:
1731 if (dwarf_version < 5)
1732 return DW_AT_GNU_addr_base;
1733 break;
1734
1735 default:
1736 break;
1737 }
1738 return at;
1739 }
1740
1741 /* And similarly for tags. */
1742 static inline enum dwarf_tag
1743 dwarf_TAG (enum dwarf_tag tag)
1744 {
1745 switch (tag)
1746 {
1747 case DW_TAG_call_site:
1748 if (dwarf_version < 5)
1749 return DW_TAG_GNU_call_site;
1750 break;
1751
1752 case DW_TAG_call_site_parameter:
1753 if (dwarf_version < 5)
1754 return DW_TAG_GNU_call_site_parameter;
1755 break;
1756
1757 default:
1758 break;
1759 }
1760 return tag;
1761 }
1762
1763 /* And similarly for forms. */
1764 static inline enum dwarf_form
1765 dwarf_FORM (enum dwarf_form form)
1766 {
1767 switch (form)
1768 {
1769 case DW_FORM_addrx:
1770 if (dwarf_version < 5)
1771 return DW_FORM_GNU_addr_index;
1772 break;
1773
1774 case DW_FORM_strx:
1775 if (dwarf_version < 5)
1776 return DW_FORM_GNU_str_index;
1777 break;
1778
1779 default:
1780 break;
1781 }
1782 return form;
1783 }
1784
1785 static unsigned long int get_base_type_offset (dw_die_ref);
1786
1787 /* Return the size of a location descriptor. */
1788
1789 static unsigned long
1790 size_of_loc_descr (dw_loc_descr_ref loc)
1791 {
1792 unsigned long size = 1;
1793
1794 switch (loc->dw_loc_opc)
1795 {
1796 case DW_OP_addr:
1797 size += DWARF2_ADDR_SIZE;
1798 break;
1799 case DW_OP_GNU_addr_index:
1800 case DW_OP_addrx:
1801 case DW_OP_GNU_const_index:
1802 case DW_OP_constx:
1803 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1804 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1805 break;
1806 case DW_OP_const1u:
1807 case DW_OP_const1s:
1808 size += 1;
1809 break;
1810 case DW_OP_const2u:
1811 case DW_OP_const2s:
1812 size += 2;
1813 break;
1814 case DW_OP_const4u:
1815 case DW_OP_const4s:
1816 size += 4;
1817 break;
1818 case DW_OP_const8u:
1819 case DW_OP_const8s:
1820 size += 8;
1821 break;
1822 case DW_OP_constu:
1823 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1824 break;
1825 case DW_OP_consts:
1826 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1827 break;
1828 case DW_OP_pick:
1829 size += 1;
1830 break;
1831 case DW_OP_plus_uconst:
1832 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1833 break;
1834 case DW_OP_skip:
1835 case DW_OP_bra:
1836 size += 2;
1837 break;
1838 case DW_OP_breg0:
1839 case DW_OP_breg1:
1840 case DW_OP_breg2:
1841 case DW_OP_breg3:
1842 case DW_OP_breg4:
1843 case DW_OP_breg5:
1844 case DW_OP_breg6:
1845 case DW_OP_breg7:
1846 case DW_OP_breg8:
1847 case DW_OP_breg9:
1848 case DW_OP_breg10:
1849 case DW_OP_breg11:
1850 case DW_OP_breg12:
1851 case DW_OP_breg13:
1852 case DW_OP_breg14:
1853 case DW_OP_breg15:
1854 case DW_OP_breg16:
1855 case DW_OP_breg17:
1856 case DW_OP_breg18:
1857 case DW_OP_breg19:
1858 case DW_OP_breg20:
1859 case DW_OP_breg21:
1860 case DW_OP_breg22:
1861 case DW_OP_breg23:
1862 case DW_OP_breg24:
1863 case DW_OP_breg25:
1864 case DW_OP_breg26:
1865 case DW_OP_breg27:
1866 case DW_OP_breg28:
1867 case DW_OP_breg29:
1868 case DW_OP_breg30:
1869 case DW_OP_breg31:
1870 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1871 break;
1872 case DW_OP_regx:
1873 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1874 break;
1875 case DW_OP_fbreg:
1876 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1877 break;
1878 case DW_OP_bregx:
1879 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1880 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1881 break;
1882 case DW_OP_piece:
1883 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1884 break;
1885 case DW_OP_bit_piece:
1886 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1887 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1888 break;
1889 case DW_OP_deref_size:
1890 case DW_OP_xderef_size:
1891 size += 1;
1892 break;
1893 case DW_OP_call2:
1894 size += 2;
1895 break;
1896 case DW_OP_call4:
1897 size += 4;
1898 break;
1899 case DW_OP_call_ref:
1900 case DW_OP_GNU_variable_value:
1901 size += DWARF_REF_SIZE;
1902 break;
1903 case DW_OP_implicit_value:
1904 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1905 + loc->dw_loc_oprnd1.v.val_unsigned;
1906 break;
1907 case DW_OP_implicit_pointer:
1908 case DW_OP_GNU_implicit_pointer:
1909 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1910 break;
1911 case DW_OP_entry_value:
1912 case DW_OP_GNU_entry_value:
1913 {
1914 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1915 size += size_of_uleb128 (op_size) + op_size;
1916 break;
1917 }
1918 case DW_OP_const_type:
1919 case DW_OP_GNU_const_type:
1920 {
1921 unsigned long o
1922 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1923 size += size_of_uleb128 (o) + 1;
1924 switch (loc->dw_loc_oprnd2.val_class)
1925 {
1926 case dw_val_class_vec:
1927 size += loc->dw_loc_oprnd2.v.val_vec.length
1928 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1929 break;
1930 case dw_val_class_const:
1931 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1932 break;
1933 case dw_val_class_const_double:
1934 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1935 break;
1936 case dw_val_class_wide_int:
1937 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1938 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1939 break;
1940 default:
1941 gcc_unreachable ();
1942 }
1943 break;
1944 }
1945 case DW_OP_regval_type:
1946 case DW_OP_GNU_regval_type:
1947 {
1948 unsigned long o
1949 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1950 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1951 + size_of_uleb128 (o);
1952 }
1953 break;
1954 case DW_OP_deref_type:
1955 case DW_OP_GNU_deref_type:
1956 {
1957 unsigned long o
1958 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1959 size += 1 + size_of_uleb128 (o);
1960 }
1961 break;
1962 case DW_OP_convert:
1963 case DW_OP_reinterpret:
1964 case DW_OP_GNU_convert:
1965 case DW_OP_GNU_reinterpret:
1966 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1967 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1968 else
1969 {
1970 unsigned long o
1971 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1972 size += size_of_uleb128 (o);
1973 }
1974 break;
1975 case DW_OP_GNU_parameter_ref:
1976 size += 4;
1977 break;
1978 default:
1979 break;
1980 }
1981
1982 return size;
1983 }
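
/* For example, a DW_OP_plus_uconst with operand 624485 occupies 4 bytes:
   1 for the opcode plus 3 for the ULEB128 encoding 0xe5 0x8e 0x26.
   A DW_OP_fbreg with operand -16 occupies 2 bytes, since any signed
   value in [-64, 63] fits in a single SLEB128 byte.  */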
1984
1985 /* Return the size of a series of location descriptors. */
1986
1987 unsigned long
1988 size_of_locs (dw_loc_descr_ref loc)
1989 {
1990 dw_loc_descr_ref l;
1991 unsigned long size;
1992
1993 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1994 field, to avoid writing to a PCH file. */
1995 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1996 {
1997 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1998 break;
1999 size += size_of_loc_descr (l);
2000 }
2001 if (! l)
2002 return size;
2003
2004 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2005 {
2006 l->dw_loc_addr = size;
2007 size += size_of_loc_descr (l);
2008 }
2009
2010 return size;
2011 }
2012
2013 /* Return the size of the value in a DW_AT_discr_value attribute. */
2014
2015 static int
2016 size_of_discr_value (dw_discr_value *discr_value)
2017 {
2018 if (discr_value->pos)
2019 return size_of_uleb128 (discr_value->v.uval);
2020 else
2021 return size_of_sleb128 (discr_value->v.sval);
2022 }
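
/* For example, an unsigned discriminant of 130 takes two bytes (ULEB128
   values of 128 or more need a second byte), while a signed discriminant
   of -2 takes a single SLEB128 byte.  */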
2023
2024 /* Return the size of the value in a DW_AT_discr_list attribute. */
2025
2026 static int
2027 size_of_discr_list (dw_discr_list_ref discr_list)
2028 {
2029 int size = 0;
2030
2031 for (dw_discr_list_ref list = discr_list;
2032 list != NULL;
2033 list = list->dw_discr_next)
2034 {
2035 /* One byte for the discriminant value descriptor, and then one or two
2036 LEB128 numbers, depending on whether it's a single case label or a
2037 range label. */
2038 size += 1;
2039 size += size_of_discr_value (&list->dw_discr_lower_bound);
2040 if (list->dw_discr_range != 0)
2041 size += size_of_discr_value (&list->dw_discr_upper_bound);
2042 }
2043 return size;
2044 }
2045
2046 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2047 static void get_ref_die_offset_label (char *, dw_die_ref);
2048 static unsigned long int get_ref_die_offset (dw_die_ref);
2049
2050 /* Output location description stack opcode's operands (if any).
2051 The for_eh_or_skip parameter controls whether register numbers are
2052 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2053 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2054 info). This should be suppressed for the cases that have not been converted
2055 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2056
2057 static void
2058 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2059 {
2060 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2061 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2062
2063 switch (loc->dw_loc_opc)
2064 {
2065 #ifdef DWARF2_DEBUGGING_INFO
2066 case DW_OP_const2u:
2067 case DW_OP_const2s:
2068 dw2_asm_output_data (2, val1->v.val_int, NULL);
2069 break;
2070 case DW_OP_const4u:
2071 if (loc->dtprel)
2072 {
2073 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2074 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2075 val1->v.val_addr);
2076 fputc ('\n', asm_out_file);
2077 break;
2078 }
2079 /* FALLTHRU */
2080 case DW_OP_const4s:
2081 dw2_asm_output_data (4, val1->v.val_int, NULL);
2082 break;
2083 case DW_OP_const8u:
2084 if (loc->dtprel)
2085 {
2086 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2087 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2088 val1->v.val_addr);
2089 fputc ('\n', asm_out_file);
2090 break;
2091 }
2092 /* FALLTHRU */
2093 case DW_OP_const8s:
2094 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2095 dw2_asm_output_data (8, val1->v.val_int, NULL);
2096 break;
2097 case DW_OP_skip:
2098 case DW_OP_bra:
2099 {
2100 int offset;
2101
2102 gcc_assert (val1->val_class == dw_val_class_loc);
2103 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2104
2105 dw2_asm_output_data (2, offset, NULL);
2106 }
2107 break;
2108 case DW_OP_implicit_value:
2109 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2110 switch (val2->val_class)
2111 {
2112 case dw_val_class_const:
2113 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2114 break;
2115 case dw_val_class_vec:
2116 {
2117 unsigned int elt_size = val2->v.val_vec.elt_size;
2118 unsigned int len = val2->v.val_vec.length;
2119 unsigned int i;
2120 unsigned char *p;
2121
2122 if (elt_size > sizeof (HOST_WIDE_INT))
2123 {
2124 elt_size /= 2;
2125 len *= 2;
2126 }
2127 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2128 i < len;
2129 i++, p += elt_size)
2130 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2131 "fp or vector constant word %u", i);
2132 }
2133 break;
2134 case dw_val_class_const_double:
2135 {
2136 unsigned HOST_WIDE_INT first, second;
2137
2138 if (WORDS_BIG_ENDIAN)
2139 {
2140 first = val2->v.val_double.high;
2141 second = val2->v.val_double.low;
2142 }
2143 else
2144 {
2145 first = val2->v.val_double.low;
2146 second = val2->v.val_double.high;
2147 }
2148 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2149 first, NULL);
2150 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2151 second, NULL);
2152 }
2153 break;
2154 case dw_val_class_wide_int:
2155 {
2156 int i;
2157 int len = get_full_len (*val2->v.val_wide);
2158 if (WORDS_BIG_ENDIAN)
2159 for (i = len - 1; i >= 0; --i)
2160 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2161 val2->v.val_wide->elt (i), NULL);
2162 else
2163 for (i = 0; i < len; ++i)
2164 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2165 val2->v.val_wide->elt (i), NULL);
2166 }
2167 break;
2168 case dw_val_class_addr:
2169 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2170 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2171 break;
2172 default:
2173 gcc_unreachable ();
2174 }
2175 break;
2176 #else
2177 case DW_OP_const2u:
2178 case DW_OP_const2s:
2179 case DW_OP_const4u:
2180 case DW_OP_const4s:
2181 case DW_OP_const8u:
2182 case DW_OP_const8s:
2183 case DW_OP_skip:
2184 case DW_OP_bra:
2185 case DW_OP_implicit_value:
2186 /* We currently don't make any attempt to make sure these are
2187 aligned properly like we do for the main unwind info, so
2188 don't support emitting things larger than a byte if we're
2189 only doing unwinding. */
2190 gcc_unreachable ();
2191 #endif
2192 case DW_OP_const1u:
2193 case DW_OP_const1s:
2194 dw2_asm_output_data (1, val1->v.val_int, NULL);
2195 break;
2196 case DW_OP_constu:
2197 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2198 break;
2199 case DW_OP_consts:
2200 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2201 break;
2202 case DW_OP_pick:
2203 dw2_asm_output_data (1, val1->v.val_int, NULL);
2204 break;
2205 case DW_OP_plus_uconst:
2206 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2207 break;
2208 case DW_OP_breg0:
2209 case DW_OP_breg1:
2210 case DW_OP_breg2:
2211 case DW_OP_breg3:
2212 case DW_OP_breg4:
2213 case DW_OP_breg5:
2214 case DW_OP_breg6:
2215 case DW_OP_breg7:
2216 case DW_OP_breg8:
2217 case DW_OP_breg9:
2218 case DW_OP_breg10:
2219 case DW_OP_breg11:
2220 case DW_OP_breg12:
2221 case DW_OP_breg13:
2222 case DW_OP_breg14:
2223 case DW_OP_breg15:
2224 case DW_OP_breg16:
2225 case DW_OP_breg17:
2226 case DW_OP_breg18:
2227 case DW_OP_breg19:
2228 case DW_OP_breg20:
2229 case DW_OP_breg21:
2230 case DW_OP_breg22:
2231 case DW_OP_breg23:
2232 case DW_OP_breg24:
2233 case DW_OP_breg25:
2234 case DW_OP_breg26:
2235 case DW_OP_breg27:
2236 case DW_OP_breg28:
2237 case DW_OP_breg29:
2238 case DW_OP_breg30:
2239 case DW_OP_breg31:
2240 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2241 break;
2242 case DW_OP_regx:
2243 {
2244 unsigned r = val1->v.val_unsigned;
2245 if (for_eh_or_skip >= 0)
2246 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2247 gcc_assert (size_of_uleb128 (r)
2248 == size_of_uleb128 (val1->v.val_unsigned));
2249 dw2_asm_output_data_uleb128 (r, NULL);
2250 }
2251 break;
2252 case DW_OP_fbreg:
2253 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2254 break;
2255 case DW_OP_bregx:
2256 {
2257 unsigned r = val1->v.val_unsigned;
2258 if (for_eh_or_skip >= 0)
2259 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2260 gcc_assert (size_of_uleb128 (r)
2261 == size_of_uleb128 (val1->v.val_unsigned));
2262 dw2_asm_output_data_uleb128 (r, NULL);
2263 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2264 }
2265 break;
2266 case DW_OP_piece:
2267 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2268 break;
2269 case DW_OP_bit_piece:
2270 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2271 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2272 break;
2273 case DW_OP_deref_size:
2274 case DW_OP_xderef_size:
2275 dw2_asm_output_data (1, val1->v.val_int, NULL);
2276 break;
2277
2278 case DW_OP_addr:
2279 if (loc->dtprel)
2280 {
2281 if (targetm.asm_out.output_dwarf_dtprel)
2282 {
2283 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2284 DWARF2_ADDR_SIZE,
2285 val1->v.val_addr);
2286 fputc ('\n', asm_out_file);
2287 }
2288 else
2289 gcc_unreachable ();
2290 }
2291 else
2292 {
2293 #ifdef DWARF2_DEBUGGING_INFO
2294 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2295 #else
2296 gcc_unreachable ();
2297 #endif
2298 }
2299 break;
2300
2301 case DW_OP_GNU_addr_index:
2302 case DW_OP_addrx:
2303 case DW_OP_GNU_const_index:
2304 case DW_OP_constx:
2305 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2306 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2307 "(index into .debug_addr)");
2308 break;
2309
2310 case DW_OP_call2:
2311 case DW_OP_call4:
2312 {
2313 unsigned long die_offset
2314 = get_ref_die_offset (val1->v.val_die_ref.die);
2315 /* Make sure the offset has been computed and that we can encode it as
2316 an operand. */
2317 gcc_assert (die_offset > 0
2318 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2319 ? 0xffff
2320 : 0xffffffff));
2321 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2322 die_offset, NULL);
2323 }
2324 break;
2325
2326 case DW_OP_call_ref:
2327 case DW_OP_GNU_variable_value:
2328 {
2329 char label[MAX_ARTIFICIAL_LABEL_BYTES
2330 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2331 gcc_assert (val1->val_class == dw_val_class_die_ref);
2332 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2333 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2334 }
2335 break;
2336
2337 case DW_OP_implicit_pointer:
2338 case DW_OP_GNU_implicit_pointer:
2339 {
2340 char label[MAX_ARTIFICIAL_LABEL_BYTES
2341 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2342 gcc_assert (val1->val_class == dw_val_class_die_ref);
2343 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2344 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2345 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2346 }
2347 break;
2348
2349 case DW_OP_entry_value:
2350 case DW_OP_GNU_entry_value:
2351 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2352 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2353 break;
2354
2355 case DW_OP_const_type:
2356 case DW_OP_GNU_const_type:
2357 {
2358 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2359 gcc_assert (o);
2360 dw2_asm_output_data_uleb128 (o, NULL);
2361 switch (val2->val_class)
2362 {
2363 case dw_val_class_const:
2364 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2365 dw2_asm_output_data (1, l, NULL);
2366 dw2_asm_output_data (l, val2->v.val_int, NULL);
2367 break;
2368 case dw_val_class_vec:
2369 {
2370 unsigned int elt_size = val2->v.val_vec.elt_size;
2371 unsigned int len = val2->v.val_vec.length;
2372 unsigned int i;
2373 unsigned char *p;
2374
2375 l = len * elt_size;
2376 dw2_asm_output_data (1, l, NULL);
2377 if (elt_size > sizeof (HOST_WIDE_INT))
2378 {
2379 elt_size /= 2;
2380 len *= 2;
2381 }
2382 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2383 i < len;
2384 i++, p += elt_size)
2385 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2386 "fp or vector constant word %u", i);
2387 }
2388 break;
2389 case dw_val_class_const_double:
2390 {
2391 unsigned HOST_WIDE_INT first, second;
2392 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2393
2394 dw2_asm_output_data (1, 2 * l, NULL);
2395 if (WORDS_BIG_ENDIAN)
2396 {
2397 first = val2->v.val_double.high;
2398 second = val2->v.val_double.low;
2399 }
2400 else
2401 {
2402 first = val2->v.val_double.low;
2403 second = val2->v.val_double.high;
2404 }
2405 dw2_asm_output_data (l, first, NULL);
2406 dw2_asm_output_data (l, second, NULL);
2407 }
2408 break;
2409 case dw_val_class_wide_int:
2410 {
2411 int i;
2412 int len = get_full_len (*val2->v.val_wide);
2413 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2414
2415 dw2_asm_output_data (1, len * l, NULL);
2416 if (WORDS_BIG_ENDIAN)
2417 for (i = len - 1; i >= 0; --i)
2418 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2419 else
2420 for (i = 0; i < len; ++i)
2421 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2422 }
2423 break;
2424 default:
2425 gcc_unreachable ();
2426 }
2427 }
2428 break;
2429 case DW_OP_regval_type:
2430 case DW_OP_GNU_regval_type:
2431 {
2432 unsigned r = val1->v.val_unsigned;
2433 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2434 gcc_assert (o);
2435 if (for_eh_or_skip >= 0)
2436 {
2437 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2438 gcc_assert (size_of_uleb128 (r)
2439 == size_of_uleb128 (val1->v.val_unsigned));
2440 }
2441 dw2_asm_output_data_uleb128 (r, NULL);
2442 dw2_asm_output_data_uleb128 (o, NULL);
2443 }
2444 break;
2445 case DW_OP_deref_type:
2446 case DW_OP_GNU_deref_type:
2447 {
2448 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2449 gcc_assert (o);
2450 dw2_asm_output_data (1, val1->v.val_int, NULL);
2451 dw2_asm_output_data_uleb128 (o, NULL);
2452 }
2453 break;
2454 case DW_OP_convert:
2455 case DW_OP_reinterpret:
2456 case DW_OP_GNU_convert:
2457 case DW_OP_GNU_reinterpret:
2458 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2459 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2460 else
2461 {
2462 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2463 gcc_assert (o);
2464 dw2_asm_output_data_uleb128 (o, NULL);
2465 }
2466 break;
2467
2468 case DW_OP_GNU_parameter_ref:
2469 {
2470 unsigned long o;
2471 gcc_assert (val1->val_class == dw_val_class_die_ref);
2472 o = get_ref_die_offset (val1->v.val_die_ref.die);
2473 dw2_asm_output_data (4, o, NULL);
2474 }
2475 break;
2476
2477 default:
2478 /* Other codes have no operands. */
2479 break;
2480 }
2481 }
2482
2483 /* Output a sequence of location operations.
2484 The for_eh_or_skip parameter controls whether register numbers are
2485 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2486 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2487 info). This should be suppressed for the cases that have not been converted
2488 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2489
2490 void
2491 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2492 {
2493 for (; loc != NULL; loc = loc->dw_loc_next)
2494 {
2495 enum dwarf_location_atom opc = loc->dw_loc_opc;
2496 /* Output the opcode. */
2497 if (for_eh_or_skip >= 0
2498 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2499 {
2500 unsigned r = (opc - DW_OP_breg0);
2501 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2502 gcc_assert (r <= 31);
2503 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2504 }
2505 else if (for_eh_or_skip >= 0
2506 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2507 {
2508 unsigned r = (opc - DW_OP_reg0);
2509 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2510 gcc_assert (r <= 31);
2511 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2512 }
2513
2514 dw2_asm_output_data (1, opc,
2515 "%s", dwarf_stack_op_name (opc));
2516
2517 /* Output the operand(s) (if any). */
2518 output_loc_operands (loc, for_eh_or_skip);
2519 }
2520 }
2521
2522 /* Output location description stack opcode's operands (if any).
2523 The output is single bytes on a line, suitable for .cfi_escape. */
2524
2525 static void
2526 output_loc_operands_raw (dw_loc_descr_ref loc)
2527 {
2528 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2529 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2530
2531 switch (loc->dw_loc_opc)
2532 {
2533 case DW_OP_addr:
2534 case DW_OP_GNU_addr_index:
2535 case DW_OP_addrx:
2536 case DW_OP_GNU_const_index:
2537 case DW_OP_constx:
2538 case DW_OP_implicit_value:
2539 /* We cannot output addresses in .cfi_escape, only bytes. */
2540 gcc_unreachable ();
2541
2542 case DW_OP_const1u:
2543 case DW_OP_const1s:
2544 case DW_OP_pick:
2545 case DW_OP_deref_size:
2546 case DW_OP_xderef_size:
2547 fputc (',', asm_out_file);
2548 dw2_asm_output_data_raw (1, val1->v.val_int);
2549 break;
2550
2551 case DW_OP_const2u:
2552 case DW_OP_const2s:
2553 fputc (',', asm_out_file);
2554 dw2_asm_output_data_raw (2, val1->v.val_int);
2555 break;
2556
2557 case DW_OP_const4u:
2558 case DW_OP_const4s:
2559 fputc (',', asm_out_file);
2560 dw2_asm_output_data_raw (4, val1->v.val_int);
2561 break;
2562
2563 case DW_OP_const8u:
2564 case DW_OP_const8s:
2565 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2566 fputc (',', asm_out_file);
2567 dw2_asm_output_data_raw (8, val1->v.val_int);
2568 break;
2569
2570 case DW_OP_skip:
2571 case DW_OP_bra:
2572 {
2573 int offset;
2574
2575 gcc_assert (val1->val_class == dw_val_class_loc);
2576 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2577
2578 fputc (',', asm_out_file);
2579 dw2_asm_output_data_raw (2, offset);
2580 }
2581 break;
2582
2583 case DW_OP_regx:
2584 {
2585 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2586 gcc_assert (size_of_uleb128 (r)
2587 == size_of_uleb128 (val1->v.val_unsigned));
2588 fputc (',', asm_out_file);
2589 dw2_asm_output_data_uleb128_raw (r);
2590 }
2591 break;
2592
2593 case DW_OP_constu:
2594 case DW_OP_plus_uconst:
2595 case DW_OP_piece:
2596 fputc (',', asm_out_file);
2597 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2598 break;
2599
2600 case DW_OP_bit_piece:
2601 fputc (',', asm_out_file);
2602 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2603 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2604 break;
2605
2606 case DW_OP_consts:
2607 case DW_OP_breg0:
2608 case DW_OP_breg1:
2609 case DW_OP_breg2:
2610 case DW_OP_breg3:
2611 case DW_OP_breg4:
2612 case DW_OP_breg5:
2613 case DW_OP_breg6:
2614 case DW_OP_breg7:
2615 case DW_OP_breg8:
2616 case DW_OP_breg9:
2617 case DW_OP_breg10:
2618 case DW_OP_breg11:
2619 case DW_OP_breg12:
2620 case DW_OP_breg13:
2621 case DW_OP_breg14:
2622 case DW_OP_breg15:
2623 case DW_OP_breg16:
2624 case DW_OP_breg17:
2625 case DW_OP_breg18:
2626 case DW_OP_breg19:
2627 case DW_OP_breg20:
2628 case DW_OP_breg21:
2629 case DW_OP_breg22:
2630 case DW_OP_breg23:
2631 case DW_OP_breg24:
2632 case DW_OP_breg25:
2633 case DW_OP_breg26:
2634 case DW_OP_breg27:
2635 case DW_OP_breg28:
2636 case DW_OP_breg29:
2637 case DW_OP_breg30:
2638 case DW_OP_breg31:
2639 case DW_OP_fbreg:
2640 fputc (',', asm_out_file);
2641 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2642 break;
2643
2644 case DW_OP_bregx:
2645 {
2646 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2647 gcc_assert (size_of_uleb128 (r)
2648 == size_of_uleb128 (val1->v.val_unsigned));
2649 fputc (',', asm_out_file);
2650 dw2_asm_output_data_uleb128_raw (r);
2651 fputc (',', asm_out_file);
2652 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2653 }
2654 break;
2655
2656 case DW_OP_implicit_pointer:
2657 case DW_OP_entry_value:
2658 case DW_OP_const_type:
2659 case DW_OP_regval_type:
2660 case DW_OP_deref_type:
2661 case DW_OP_convert:
2662 case DW_OP_reinterpret:
2663 case DW_OP_GNU_implicit_pointer:
2664 case DW_OP_GNU_entry_value:
2665 case DW_OP_GNU_const_type:
2666 case DW_OP_GNU_regval_type:
2667 case DW_OP_GNU_deref_type:
2668 case DW_OP_GNU_convert:
2669 case DW_OP_GNU_reinterpret:
2670 case DW_OP_GNU_parameter_ref:
2671 gcc_unreachable ();
2672 break;
2673
2674 default:
2675 /* Other codes have no operands. */
2676 break;
2677 }
2678 }
2679
2680 void
2681 output_loc_sequence_raw (dw_loc_descr_ref loc)
2682 {
2683 while (1)
2684 {
2685 enum dwarf_location_atom opc = loc->dw_loc_opc;
2686 /* Output the opcode. */
2687 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2688 {
2689 unsigned r = (opc - DW_OP_breg0);
2690 r = DWARF2_FRAME_REG_OUT (r, 1);
2691 gcc_assert (r <= 31);
2692 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2693 }
2694 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2695 {
2696 unsigned r = (opc - DW_OP_reg0);
2697 r = DWARF2_FRAME_REG_OUT (r, 1);
2698 gcc_assert (r <= 31);
2699 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2700 }
2701 /* Output the opcode. */
2702 fprintf (asm_out_file, "%#x", opc);
2703 output_loc_operands_raw (loc);
2704
2705 if (!loc->dw_loc_next)
2706 break;
2707 loc = loc->dw_loc_next;
2708
2709 fputc (',', asm_out_file);
2710 }
2711 }
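
/* As a rough example, the single-operation expression "DW_OP_breg7 16"
   comes out as something like "0x77,0x10": the 0x77 opcode byte followed
   by the one-byte SLEB128 encoding of 16, ready to be used as the
   operand list of a .cfi_escape directive.  */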
2712
2713 /* This function builds a dwarf location descriptor sequence from a
2714 dw_cfa_location, adding the given OFFSET to the result of the
2715 expression. */
2716
2717 struct dw_loc_descr_node *
2718 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2719 {
2720 struct dw_loc_descr_node *head, *tmp;
2721
2722 offset += cfa->offset;
2723
2724 if (cfa->indirect)
2725 {
2726 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2727 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2728 head->dw_loc_oprnd1.val_entry = NULL;
2729 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2730 add_loc_descr (&head, tmp);
2731 loc_descr_plus_const (&head, offset);
2732 }
2733 else
2734 head = new_reg_loc_descr (cfa->reg, offset);
2735
2736 return head;
2737 }
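
/* For instance, given a non-indirect CFA of register 7 with offset 16
   and OFFSET == 8, the result is a single register-relative operation
   describing the address reg7 + 24.  For an indirect CFA, the address
   reg + base_offset is dereferenced first and the combined offset is
   then added to the loaded value.  */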
2738
2739 /* This function builds a dwarf location descriptor sequence for
2740 the address at OFFSET from the CFA when stack is aligned to
2741 ALIGNMENT bytes. */
2742
2743 struct dw_loc_descr_node *
2744 build_cfa_aligned_loc (dw_cfa_location *cfa,
2745 poly_int64 offset, HOST_WIDE_INT alignment)
2746 {
2747 struct dw_loc_descr_node *head;
2748 unsigned int dwarf_fp
2749 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2750
2751 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2752 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2753 {
2754 head = new_reg_loc_descr (dwarf_fp, 0);
2755 add_loc_descr (&head, int_loc_descriptor (alignment));
2756 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2757 loc_descr_plus_const (&head, offset);
2758 }
2759 else
2760 head = new_reg_loc_descr (dwarf_fp, offset);
2761 return head;
2762 }
2763 \f
2764 /* And now, the support for symbolic debugging information. */
2765
2766 /* .debug_str support. */
2767
2768 static void dwarf2out_init (const char *);
2769 static void dwarf2out_finish (const char *);
2770 static void dwarf2out_early_finish (const char *);
2771 static void dwarf2out_assembly_start (void);
2772 static void dwarf2out_define (unsigned int, const char *);
2773 static void dwarf2out_undef (unsigned int, const char *);
2774 static void dwarf2out_start_source_file (unsigned, const char *);
2775 static void dwarf2out_end_source_file (unsigned);
2776 static void dwarf2out_function_decl (tree);
2777 static void dwarf2out_begin_block (unsigned, unsigned);
2778 static void dwarf2out_end_block (unsigned, unsigned);
2779 static bool dwarf2out_ignore_block (const_tree);
2780 static void dwarf2out_early_global_decl (tree);
2781 static void dwarf2out_late_global_decl (tree);
2782 static void dwarf2out_type_decl (tree, int);
2783 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2784 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2785 dw_die_ref);
2786 static void dwarf2out_abstract_function (tree);
2787 static void dwarf2out_var_location (rtx_insn *);
2788 static void dwarf2out_inline_entry (tree);
2789 static void dwarf2out_size_function (tree);
2790 static void dwarf2out_begin_function (tree);
2791 static void dwarf2out_end_function (unsigned int);
2792 static void dwarf2out_register_main_translation_unit (tree unit);
2793 static void dwarf2out_set_name (tree, tree);
2794 static void dwarf2out_register_external_die (tree decl, const char *sym,
2795 unsigned HOST_WIDE_INT off);
2796 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2797 unsigned HOST_WIDE_INT *off);
2798
2799 /* The debug hooks structure. */
2800
2801 const struct gcc_debug_hooks dwarf2_debug_hooks =
2802 {
2803 dwarf2out_init,
2804 dwarf2out_finish,
2805 dwarf2out_early_finish,
2806 dwarf2out_assembly_start,
2807 dwarf2out_define,
2808 dwarf2out_undef,
2809 dwarf2out_start_source_file,
2810 dwarf2out_end_source_file,
2811 dwarf2out_begin_block,
2812 dwarf2out_end_block,
2813 dwarf2out_ignore_block,
2814 dwarf2out_source_line,
2815 dwarf2out_begin_prologue,
2816 #if VMS_DEBUGGING_INFO
2817 dwarf2out_vms_end_prologue,
2818 dwarf2out_vms_begin_epilogue,
2819 #else
2820 debug_nothing_int_charstar,
2821 debug_nothing_int_charstar,
2822 #endif
2823 dwarf2out_end_epilogue,
2824 dwarf2out_begin_function,
2825 dwarf2out_end_function, /* end_function */
2826 dwarf2out_register_main_translation_unit,
2827 dwarf2out_function_decl, /* function_decl */
2828 dwarf2out_early_global_decl,
2829 dwarf2out_late_global_decl,
2830 dwarf2out_type_decl, /* type_decl */
2831 dwarf2out_imported_module_or_decl,
2832 dwarf2out_die_ref_for_decl,
2833 dwarf2out_register_external_die,
2834 debug_nothing_tree, /* deferred_inline_function */
2835 /* The DWARF 2 backend tries to reduce debugging bloat by not
2836 emitting the abstract description of inline functions until
2837 something tries to reference them. */
2838 dwarf2out_abstract_function, /* outlining_inline_function */
2839 debug_nothing_rtx_code_label, /* label */
2840 debug_nothing_int, /* handle_pch */
2841 dwarf2out_var_location,
2842 dwarf2out_inline_entry, /* inline_entry */
2843 dwarf2out_size_function, /* size_function */
2844 dwarf2out_switch_text_section,
2845 dwarf2out_set_name,
2846 1, /* start_end_main_source_file */
2847 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2848 };
2849
2850 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2851 {
2852 dwarf2out_init,
2853 debug_nothing_charstar,
2854 debug_nothing_charstar,
2855 dwarf2out_assembly_start,
2856 debug_nothing_int_charstar,
2857 debug_nothing_int_charstar,
2858 debug_nothing_int_charstar,
2859 debug_nothing_int,
2860 debug_nothing_int_int, /* begin_block */
2861 debug_nothing_int_int, /* end_block */
2862 debug_true_const_tree, /* ignore_block */
2863 dwarf2out_source_line, /* source_line */
2864 debug_nothing_int_int_charstar, /* begin_prologue */
2865 debug_nothing_int_charstar, /* end_prologue */
2866 debug_nothing_int_charstar, /* begin_epilogue */
2867 debug_nothing_int_charstar, /* end_epilogue */
2868 debug_nothing_tree, /* begin_function */
2869 debug_nothing_int, /* end_function */
2870 debug_nothing_tree, /* register_main_translation_unit */
2871 debug_nothing_tree, /* function_decl */
2872 debug_nothing_tree, /* early_global_decl */
2873 debug_nothing_tree, /* late_global_decl */
2874 debug_nothing_tree_int, /* type_decl */
2875 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2876 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2877 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2878 debug_nothing_tree, /* deferred_inline_function */
2879 debug_nothing_tree, /* outlining_inline_function */
2880 debug_nothing_rtx_code_label, /* label */
2881 debug_nothing_int, /* handle_pch */
2882 debug_nothing_rtx_insn, /* var_location */
2883 debug_nothing_tree, /* inline_entry */
2884 debug_nothing_tree, /* size_function */
2885 debug_nothing_void, /* switch_text_section */
2886 debug_nothing_tree_tree, /* set_name */
2887 0, /* start_end_main_source_file */
2888 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2889 };
2890 \f
2891 /* NOTE: In the comments in this file, many references are made to
2892 "Debugging Information Entries". This term is abbreviated as `DIE'
2893 throughout the remainder of this file. */
2894
2895 /* An internal representation of the DWARF output is built, and then
2896 walked to generate the DWARF debugging info. The walk of the internal
2897 representation is done after the entire program has been compiled.
2898 The types below are used to describe the internal representation. */
2899
2900 /* Whether to put type DIEs into their own section .debug_types instead
2901 of making them part of the .debug_info section. This is only done for
2902 Dwarf V4 or higher, and only if the user didn't disable it through
2903 -fno-debug-types-section. It is more efficient to put them in
2904 separate comdat sections, since the linker will then be able to
2905 remove duplicates. But not all tools support .debug_types sections
2906 yet. For Dwarf V5 or higher .debug_types doesn't exist any more;
2907 type units are instead emitted as DW_UT_type units in the .debug_info section. */
2908
2909 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2910
2911 /* Various DIE's use offsets relative to the beginning of the
2912 .debug_info section to refer to each other. */
2913
2914 typedef long int dw_offset;
2915
2916 struct comdat_type_node;
2917
2918 /* The entries in the line_info table more-or-less mirror the opcodes
2919 that are used in the real dwarf line table. Arrays of these entries
2920 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2921 supported. */
2922
2923 enum dw_line_info_opcode {
2924 /* Emit DW_LNE_set_address; the operand is the label index. */
2925 LI_set_address,
2926
2927 /* Emit a row to the matrix with the given line. This may be done
2928 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2929 special opcodes. */
2930 LI_set_line,
2931
2932 /* Emit a DW_LNS_set_file. */
2933 LI_set_file,
2934
2935 /* Emit a DW_LNS_set_column. */
2936 LI_set_column,
2937
2938 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2939 LI_negate_stmt,
2940
2941 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2942 LI_set_prologue_end,
2943 LI_set_epilogue_begin,
2944
2945 /* Emit a DW_LNE_set_discriminator. */
2946 LI_set_discriminator,
2947
2948 /* Output a Fixed Advance PC; the target PC is the label index; the
2949 base PC is the previous LI_adv_address or LI_set_address entry.
2950 We only use this when emitting debug views without assembler
2951 support, at explicit user request. Ideally, we should only use
2952 it when the offset might be zero but we can't tell: it's the only
2953 way to maybe change the PC without resetting the view number. */
2954 LI_adv_address
2955 };
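
/* As a rough sketch, a row of the matrix for, say, line 42 of file 1 at
   some code label is typically recorded as an LI_set_address entry whose
   val is the label index, followed (when they have changed) by
   LI_set_file and LI_set_column entries, and finally an LI_set_line
   entry with val 42; the actual DWARF line opcodes are only chosen when
   the table is written out.  */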
2956
2957 typedef struct GTY(()) dw_line_info_struct {
2958 enum dw_line_info_opcode opcode;
2959 unsigned int val;
2960 } dw_line_info_entry;
2961
2962
2963 struct GTY(()) dw_line_info_table {
2964 /* The label that marks the end of this section. */
2965 const char *end_label;
2966
2967 /* The values for the last row of the matrix, as collected in the table.
2968 These are used to minimize the changes to the next row. */
2969 unsigned int file_num;
2970 unsigned int line_num;
2971 unsigned int column_num;
2972 int discrim_num;
2973 bool is_stmt;
2974 bool in_use;
2975
2976 /* This denotes the NEXT view number.
2977
2978 If it is 0, it is known that the NEXT view will be the first view
2979 at the given PC.
2980
2981 If it is -1, we're forcing the view number to be reset, e.g. at a
2982 function entry.
2983
2984 The meaning of other nonzero values depends on whether we're
2985 computing views internally or leaving it for the assembler to do
2986 so. If we're emitting them internally, view denotes the view
2987 number since the last known advance of PC. If we're leaving it
2988 for the assembler, it denotes the LVU label number that we're
2989 going to ask the assembler to assign. */
2990 var_loc_view view;
2991
2992 /* This counts the number of symbolic views emitted in this table
2993 since the latest view reset. Its max value, over all tables,
2994 sets symview_upper_bound. */
2995 var_loc_view symviews_since_reset;
2996
2997 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
2998 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
2999 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3000 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3001
3002 vec<dw_line_info_entry, va_gc> *entries;
3003 };
3004
3005 /* This is an upper bound for view numbers that the assembler may
3006 assign to symbolic views output in this translation. It is used to
3007 decide how big a field to use to represent view numbers in
3008 symview-classed attributes. */
3009
3010 static var_loc_view symview_upper_bound;
3011
3012 /* If we're keeping track of location views and their reset points, and
3013 INSN is a reset point (i.e., it necessarily advances the PC), mark
3014 the next view in TABLE as reset. */
3015
3016 static void
3017 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3018 {
3019 if (!debug_internal_reset_location_views)
3020 return;
3021
3022 /* Maybe turn (part of?) this test into a default target hook. */
3023 int reset = 0;
3024
3025 if (targetm.reset_location_view)
3026 reset = targetm.reset_location_view (insn);
3027
3028 if (reset)
3029 ;
3030 else if (JUMP_TABLE_DATA_P (insn))
3031 reset = 1;
3032 else if (GET_CODE (insn) == USE
3033 || GET_CODE (insn) == CLOBBER
3034 || GET_CODE (insn) == ASM_INPUT
3035 || asm_noperands (insn) >= 0)
3036 ;
3037 else if (get_attr_min_length (insn) > 0)
3038 reset = 1;
3039
3040 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3041 RESET_NEXT_VIEW (table->view);
3042 }
3043
3044 /* Each DIE attribute has a field specifying the attribute kind,
3045 a link to the next attribute in the chain, and an attribute value.
3046 Attributes are typically linked below the DIE they modify. */
3047
3048 typedef struct GTY(()) dw_attr_struct {
3049 enum dwarf_attribute dw_attr;
3050 dw_val_node dw_attr_val;
3051 }
3052 dw_attr_node;
3053
3054
3055 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3056 The children of each node form a circular list linked by
3057 die_sib. die_child points to the node *before* the "first" child node. */
3058
3059 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3060 union die_symbol_or_type_node
3061 {
3062 const char * GTY ((tag ("0"))) die_symbol;
3063 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3064 }
3065 GTY ((desc ("%0.comdat_type_p"))) die_id;
3066 vec<dw_attr_node, va_gc> *die_attr;
3067 dw_die_ref die_parent;
3068 dw_die_ref die_child;
3069 dw_die_ref die_sib;
3070 dw_die_ref die_definition; /* ref from a specification to its definition */
3071 dw_offset die_offset;
3072 unsigned long die_abbrev;
3073 int die_mark;
3074 unsigned int decl_id;
3075 enum dwarf_tag die_tag;
3076 /* Die is used and must not be pruned as unused. */
3077 BOOL_BITFIELD die_perennial_p : 1;
3078 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3079 /* For an external ref to die_symbol if die_offset contains an extra
3080 offset to that symbol. */
3081 BOOL_BITFIELD with_offset : 1;
3082 /* Whether this DIE was removed from the DIE tree, for example via
3083 prune_unused_types. We don't consider those present from the
3084 DIE lookup routines. */
3085 BOOL_BITFIELD removed : 1;
3086 /* Lots of spare bits. */
3087 }
3088 die_node;
3089
3090 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3091 static bool early_dwarf;
3092 static bool early_dwarf_finished;
3093 struct set_early_dwarf {
3094 bool saved;
3095 set_early_dwarf () : saved(early_dwarf)
3096 {
3097 gcc_assert (! early_dwarf_finished);
3098 early_dwarf = true;
3099 }
3100 ~set_early_dwarf () { early_dwarf = saved; }
3101 };
3102
3103 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3104 #define FOR_EACH_CHILD(die, c, expr) do { \
3105 c = die->die_child; \
3106 if (c) do { \
3107 c = c->die_sib; \
3108 expr; \
3109 } while (c != die->die_child); \
3110 } while (0)
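
/* For example, counting the immediate children of DIE:

     dw_die_ref c;
     unsigned n = 0;
     FOR_EACH_CHILD (die, c, n++);

   Because the children form a circular list and die_child points to the
   node *before* the "first" child, the macro advances C before evaluating
   EXPR and stops once it wraps around to die_child again.  */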
3111
3112 /* The pubname structure */
3113
3114 typedef struct GTY(()) pubname_struct {
3115 dw_die_ref die;
3116 const char *name;
3117 }
3118 pubname_entry;
3119
3120
3121 struct GTY(()) dw_ranges {
3122 const char *label;
3123 /* If this is positive, it's a block number, otherwise it's a
3124 bitwise-negated index into dw_ranges_by_label. */
3125 int num;
3126 /* Index for the range list for DW_FORM_rnglistx. */
3127 unsigned int idx : 31;
3128 /* True if this range might be in a different section
3129 from the previous entry. */
3130 unsigned int maybe_new_sec : 1;
3131 };
3132
3133 /* A structure to hold a macinfo entry. */
3134
3135 typedef struct GTY(()) macinfo_struct {
3136 unsigned char code;
3137 unsigned HOST_WIDE_INT lineno;
3138 const char *info;
3139 }
3140 macinfo_entry;
3141
3142
3143 struct GTY(()) dw_ranges_by_label {
3144 const char *begin;
3145 const char *end;
3146 };
3147
3148 /* The comdat type node structure. */
3149 struct GTY(()) comdat_type_node
3150 {
3151 dw_die_ref root_die;
3152 dw_die_ref type_die;
3153 dw_die_ref skeleton_die;
3154 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3155 comdat_type_node *next;
3156 };
3157
3158 /* A list of DIEs for which we can't determine ancestry (parent_die
3159 field) just yet. Later in dwarf2out_finish we will fill in the
3160 missing bits. */
3161 typedef struct GTY(()) limbo_die_struct {
3162 dw_die_ref die;
3163 /* The tree for which this DIE was created. We use this to
3164 determine ancestry later. */
3165 tree created_for;
3166 struct limbo_die_struct *next;
3167 }
3168 limbo_die_node;
3169
3170 typedef struct skeleton_chain_struct
3171 {
3172 dw_die_ref old_die;
3173 dw_die_ref new_die;
3174 struct skeleton_chain_struct *parent;
3175 }
3176 skeleton_chain_node;
3177
3178 /* Define a macro which returns nonzero for a TYPE_DECL which was
3179 implicitly generated for a type.
3180
3181 Note that, unlike the C front-end (which generates a NULL named
3182 TYPE_DECL node for each complete tagged type, each array type,
3183 and each function type node created) the C++ front-end generates
3184 a _named_ TYPE_DECL node for each tagged type node created.
3185 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3186 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3187 front-end, but for each type, tagged or not. */
3188
3189 #define TYPE_DECL_IS_STUB(decl) \
3190 (DECL_NAME (decl) == NULL_TREE \
3191 || (DECL_ARTIFICIAL (decl) \
3192 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3193 /* This is necessary for stub decls that \
3194 appear in nested inline functions. */ \
3195 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3196 && (decl_ultimate_origin (decl) \
3197 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3198
3199 /* Information concerning the compilation unit's programming
3200 language, and compiler version. */
3201
3202 /* Fixed size portion of the DWARF compilation unit header. */
3203 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3204 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3205 + (dwarf_version >= 5 ? 4 : 3))
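
/* For example, with 32-bit DWARF this is 4 (initial length) + 4 (abbrev
   offset) + 3 (2-byte version plus 1-byte address size) = 11 bytes for
   DWARF 2-4, and one byte more (12) for DWARF 5, which adds a unit_type
   field.  */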
3206
3207 /* Fixed size portion of the DWARF comdat type unit header. */
3208 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3209 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3210 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3211
3212 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3213 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3214 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3215
3216 /* Fixed size portion of public names info. */
3217 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3218
3219 /* Fixed size portion of the address range info. */
3220 #define DWARF_ARANGES_HEADER_SIZE \
3221 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3222 DWARF2_ADDR_SIZE * 2) \
3223 - DWARF_INITIAL_LENGTH_SIZE)
3224
3225 /* Size of padding portion in the address range info. It must be
3226 aligned to twice the pointer size. */
3227 #define DWARF_ARANGES_PAD_SIZE \
3228 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3229 DWARF2_ADDR_SIZE * 2) \
3230 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
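
/* For example, with 4-byte offsets and 8-byte addresses the header
   (4-byte initial length, 4-byte .debug_info offset, 2-byte version,
   1-byte address size, 1-byte segment size) totals 12 bytes and is
   rounded up to the next multiple of 2 * DWARF2_ADDR_SIZE = 16, so
   DWARF_ARANGES_HEADER_SIZE is 12 (the part after the initial length,
   including padding) and DWARF_ARANGES_PAD_SIZE is 4.  */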
3231
3232 /* Use assembler line directives if available. */
3233 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3234 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3235 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3236 #else
3237 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3238 #endif
3239 #endif
3240
3241 /* Use assembler views in line directives if available. */
3242 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3243 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3244 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3245 #else
3246 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3247 #endif
3248 #endif
3249
3250 /* Return true if GCC configure detected assembler support for .loc. */
3251
3252 bool
3253 dwarf2out_default_as_loc_support (void)
3254 {
3255 return DWARF2_ASM_LINE_DEBUG_INFO;
3256 #if (GCC_VERSION >= 3000)
3257 # undef DWARF2_ASM_LINE_DEBUG_INFO
3258 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3259 #endif
3260 }
3261
3262 /* Return true if GCC configure detected assembler support for views
3263 in .loc directives. */
3264
3265 bool
3266 dwarf2out_default_as_locview_support (void)
3267 {
3268 return DWARF2_ASM_VIEW_DEBUG_INFO;
3269 #if (GCC_VERSION >= 3000)
3270 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3271 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3272 #endif
3273 }
3274
3275 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3276 view computation, and it refers to a view identifier for which we
3277 will not emit a label because it is known to map to a view number
3278 zero. We won't allocate the bitmap if we're not using assembler
3279 support for location views, but we have to make the variable
3280 visible for GGC and for code that will be optimized out for lack of
3281 support but that's still parsed and compiled. We could abstract it
3282 out with macros, but it's not worth it. */
3283 static GTY(()) bitmap zero_view_p;
3284
3285 /* Evaluate to TRUE iff N is known to identify the first location view
3286 at its PC. When not using assembler location view computation,
3287 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3288 and the view label numbers recorded in it are the ones known to be
3289 zero. */
3290 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3291 || (N) == (var_loc_view)-1 \
3292 || (zero_view_p \
3293 && bitmap_bit_p (zero_view_p, (N))))
3294
3295 /* Return true iff we're to emit .loc directives for the assembler to
3296 generate line number sections.
3297
3298 When we're not emitting views, all we need from the assembler is
3299 support for .loc directives.
3300
3301 If we are emitting views, we can only use the assembler's .loc
3302 support if it also supports views.
3303
3304 When the compiler is emitting the line number programs and
3305 computing view numbers itself, it resets view numbers at known PC
3306 changes and counts from that, and then it emits view numbers as
3307 literal constants in locviewlists. There are cases in which the
3308 compiler is not sure about PC changes, e.g. when extra alignment is
3309 requested for a label. In these cases, the compiler may not reset
3310 the view counter, and the potential PC advance in the line number
3311 program will use an opcode that does not reset the view counter
3312 even if the PC actually changes, so that compiler and debug info
3313 consumer can keep view numbers in sync.
3314
3315 When the compiler defers view computation to the assembler, it
3316 emits symbolic view numbers in locviewlists, with the exception of
3317 views known to be zero (forced resets, or reset after
3318 compiler-visible PC changes): instead of emitting symbols for
3319 these, we emit literal zero and assert the assembler agrees with
3320 the compiler's assessment. We could use symbolic views everywhere,
3321 instead of special-casing zero views, but then we'd be unable to
3322 optimize out locviewlists that contain only zeros. */
3323
3324 static bool
3325 output_asm_line_debug_info (void)
3326 {
3327 return (dwarf2out_as_loc_support
3328 && (dwarf2out_as_locview_support
3329 || !debug_variable_location_views));
3330 }
3331
3332 /* Minimum line offset in a special line info. opcode.
3333 This value was chosen to give a reasonable range of values. */
3334 #define DWARF_LINE_BASE -10
3335
3336 /* First special line opcode - leave room for the standard opcodes. */
3337 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3338
3339 /* Range of line offsets in a special line info. opcode. */
3340 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
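
/* With the standard opcodes ending at DW_LNS_set_isa == 12, this gives an
   opcode base of 13 and a line range of 242.  A special opcode then encodes
   a (line delta, address advance) pair roughly as

     opcode = (line_delta - DWARF_LINE_BASE)
	      + DWARF_LINE_RANGE * addr_advance + DWARF_LINE_OPCODE_BASE

   whenever the result still fits in a single byte (<= 255), which is what
   keeps most rows of the line table down to one byte each.  */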
3341
3342 /* Flag that indicates the initial value of the is_stmt_start flag.
3343 In the present implementation, we do not mark any lines as
3344 the beginning of a source statement, because that information
3345 is not made available by the GCC front-end. */
3346 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3347
3348 /* Maximum number of operations per instruction bundle. */
3349 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3350 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3351 #endif
3352
3353 /* This location is used by calc_die_sizes() to keep track of
3354 the offset of each DIE within the .debug_info section. */
3355 static unsigned long next_die_offset;
3356
3357 /* Record the root of the DIE's built for the current compilation unit. */
3358 static GTY(()) dw_die_ref single_comp_unit_die;
3359
3360 /* A list of type DIEs that have been separated into comdat sections. */
3361 static GTY(()) comdat_type_node *comdat_type_list;
3362
3363 /* A list of CU DIEs that have been separated. */
3364 static GTY(()) limbo_die_node *cu_die_list;
3365
3366 /* A list of DIEs with a NULL parent waiting to be relocated. */
3367 static GTY(()) limbo_die_node *limbo_die_list;
3368
3369 /* A list of DIEs for which we may have to generate
3370 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3371 static GTY(()) limbo_die_node *deferred_asm_name;
3372
3373 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3374 {
3375 typedef const char *compare_type;
3376
3377 static hashval_t hash (dwarf_file_data *);
3378 static bool equal (dwarf_file_data *, const char *);
3379 };
3380
3381 /* Filenames referenced by this compilation unit. */
3382 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3383
3384 struct decl_die_hasher : ggc_ptr_hash<die_node>
3385 {
3386 typedef tree compare_type;
3387
3388 static hashval_t hash (die_node *);
3389 static bool equal (die_node *, tree);
3390 };
3391 /* A hash table of references to DIE's that describe declarations.
3392 The key is a DECL_UID() which is a unique number identifying each decl. */
3393 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3394
3395 struct GTY ((for_user)) variable_value_struct {
3396 unsigned int decl_id;
3397 vec<dw_die_ref, va_gc> *dies;
3398 };
3399
3400 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3401 {
3402 typedef tree compare_type;
3403
3404 static hashval_t hash (variable_value_struct *);
3405 static bool equal (variable_value_struct *, tree);
3406 };
3407 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3408 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL that is the
3409 DECL_CONTEXT of the referenced VAR_DECLs. */
3410 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3411
3412 struct block_die_hasher : ggc_ptr_hash<die_struct>
3413 {
3414 static hashval_t hash (die_struct *);
3415 static bool equal (die_struct *, die_struct *);
3416 };
3417
3418 /* A hash table of references to DIE's that describe COMMON blocks.
3419 The key is DECL_UID() ^ die_parent. */
3420 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3421
3422 typedef struct GTY(()) die_arg_entry_struct {
3423 dw_die_ref die;
3424 tree arg;
3425 } die_arg_entry;
3426
3427
3428 /* Node of the variable location list. */
3429 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3430 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3431 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3432 in mode of the EXPR_LIST node and first EXPR_LIST operand
3433 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3434 location or NULL for padding. For larger bitsizes,
3435 mode is 0 and first operand is a CONCAT with bitsize
3436 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3437 NULL as second operand. */
3438 rtx GTY (()) loc;
3439 const char * GTY (()) label;
3440 struct var_loc_node * GTY (()) next;
3441 var_loc_view view;
3442 };
3443
3444 /* Variable location list. */
3445 struct GTY ((for_user)) var_loc_list_def {
3446 struct var_loc_node * GTY (()) first;
3447
3448 /* Pointer to the last but one or last element of the
3449 chained list. If the list is empty, both first and
3450 last are NULL. If the list contains just one node,
3451 or the last node is certainly not redundant, it points
3452 to the last node; otherwise it points to the last but one.
3453 Do not mark it for GC because it is marked through the chain. */
3454 struct var_loc_node * GTY ((skip ("%h"))) last;
3455
3456 /* Pointer to the last element before section switch,
3457 if NULL, either sections weren't switched or first
3458 is after section switch. */
3459 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3460
3461 /* DECL_UID of the variable decl. */
3462 unsigned int decl_id;
3463 };
3464 typedef struct var_loc_list_def var_loc_list;
3465
3466 /* Call argument location list. */
3467 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3468 rtx GTY (()) call_arg_loc_note;
3469 const char * GTY (()) label;
3470 tree GTY (()) block;
3471 bool tail_call_p;
3472 rtx GTY (()) symbol_ref;
3473 struct call_arg_loc_node * GTY (()) next;
3474 };
3475
3476
3477 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3478 {
3479 typedef const_tree compare_type;
3480
3481 static hashval_t hash (var_loc_list *);
3482 static bool equal (var_loc_list *, const_tree);
3483 };
3484
3485 /* Table of decl location linked lists. */
3486 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3487
3488 /* Head and tail of call_arg_loc chain. */
3489 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3490 static struct call_arg_loc_node *call_arg_loc_last;
3491
3492 /* Number of call sites in the current function. */
3493 static int call_site_count = -1;
3494 /* Number of tail call sites in the current function. */
3495 static int tail_call_site_count = -1;
3496
3497 /* A cached location list. */
3498 struct GTY ((for_user)) cached_dw_loc_list_def {
3499 /* The DECL_UID of the decl that this entry describes. */
3500 unsigned int decl_id;
3501
3502 /* The cached location list. */
3503 dw_loc_list_ref loc_list;
3504 };
3505 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3506
3507 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3508 {
3509
3510 typedef const_tree compare_type;
3511
3512 static hashval_t hash (cached_dw_loc_list *);
3513 static bool equal (cached_dw_loc_list *, const_tree);
3514 };
3515
3516 /* Table of cached location lists. */
3517 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3518
3519 /* A vector of references to DIE's that are uniquely identified by their tag,
3520 presence/absence of children DIE's, and list of attribute/value pairs. */
3521 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3522
3523 /* A hash map to remember the stack usage for DWARF procedures. The value
3524 stored is the stack size difference between before the DWARF procedure
3525 invocation and after it returned. In other words, for a DWARF procedure
3526 that consumes N stack slots and that pushes M ones, this stores M - N. */
3527 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3528
3529 /* A global counter for generating labels for line number data. */
3530 static unsigned int line_info_label_num;
3531
3532 /* The current table to which we should emit line number information
3533 for the current function. This will be set up at the beginning of
3534 assembly for the function. */
3535 static GTY(()) dw_line_info_table *cur_line_info_table;
3536
3537 /* The two default tables of line number info. */
3538 static GTY(()) dw_line_info_table *text_section_line_info;
3539 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3540
3541 /* The set of all non-default tables of line number info. */
3542 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3543
3544 /* A flag to tell pubnames/types export if there is an info section to
3545 refer to. */
3546 static bool info_section_emitted;
3547
3548 /* A pointer to the base of a table that contains a list of publicly
3549 accessible names. */
3550 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3551
3552 /* A pointer to the base of a table that contains a list of publicly
3553 accessible types. */
3554 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3555
3556 /* A pointer to the base of a table that contains a list of macro
3557 defines/undefines (and file start/end markers). */
3558 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3559
3560 /* True if .debug_macinfo or .debug_macros section is going to be
3561 emitted. */
3562 #define have_macinfo \
3563 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3564 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3565 && !macinfo_table->is_empty ())
3566
3567 /* Vector of dies for which we should generate .debug_ranges info. */
3568 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3569
3570 /* Vector of pairs of labels referenced in ranges_table. */
3571 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3572
3573 /* Whether we have location lists that need outputting. */
3574 static GTY(()) bool have_location_lists;
3575
3576 /* Unique label counter. */
3577 static GTY(()) unsigned int loclabel_num;
3578
3579 /* Unique label counter for point-of-call tables. */
3580 static GTY(()) unsigned int poc_label_num;
3581
3582 /* The last file entry emitted by maybe_emit_file(). */
3583 static GTY(()) struct dwarf_file_data * last_emitted_file;
3584
3585 /* Number of internal labels generated by gen_internal_sym(). */
3586 static GTY(()) int label_num;
3587
3588 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3589
3590 /* Instances of generic types for which we need to generate debug
3591 info that describe their generic parameters and arguments. That
3592 generation needs to happen once all types are properly laid out so
3593 we do it at the end of compilation. */
3594 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3595
3596 /* Offset from the "steady-state frame pointer" to the frame base,
3597 within the current function. */
3598 static poly_int64 frame_pointer_fb_offset;
3599 static bool frame_pointer_fb_offset_valid;
3600
3601 static vec<dw_die_ref> base_types;
3602
3603 /* Flags to represent a set of attribute classes for attributes that describe
3604 a scalar value (bounds, pointers, ...). */
3605 enum dw_scalar_form
3606 {
3607 dw_scalar_form_constant = 0x01,
3608 dw_scalar_form_exprloc = 0x02,
3609 dw_scalar_form_reference = 0x04
3610 };
3611
3612 /* Forward declarations for functions defined in this file. */
3613
3614 static int is_pseudo_reg (const_rtx);
3615 static tree type_main_variant (tree);
3616 static int is_tagged_type (const_tree);
3617 static const char *dwarf_tag_name (unsigned);
3618 static const char *dwarf_attr_name (unsigned);
3619 static const char *dwarf_form_name (unsigned);
3620 static tree decl_ultimate_origin (const_tree);
3621 static tree decl_class_context (tree);
3622 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3623 static inline enum dw_val_class AT_class (dw_attr_node *);
3624 static inline unsigned int AT_index (dw_attr_node *);
3625 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3626 static inline unsigned AT_flag (dw_attr_node *);
3627 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3628 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3629 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3630 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3631 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3632 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3633 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3634 unsigned int, unsigned char *);
3635 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3636 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3637 static inline const char *AT_string (dw_attr_node *);
3638 static enum dwarf_form AT_string_form (dw_attr_node *);
3639 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3640 static void add_AT_specification (dw_die_ref, dw_die_ref);
3641 static inline dw_die_ref AT_ref (dw_attr_node *);
3642 static inline int AT_ref_external (dw_attr_node *);
3643 static inline void set_AT_ref_external (dw_attr_node *, int);
3644 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3645 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3646 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3647 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3648 dw_loc_list_ref);
3649 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3650 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3651 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3652 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3653 static void remove_addr_table_entry (addr_table_entry *);
3654 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3655 static inline rtx AT_addr (dw_attr_node *);
3656 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3657 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3658 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3659 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3660 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3661 const char *);
3662 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3663 unsigned HOST_WIDE_INT);
3664 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3665 unsigned long, bool);
3666 static inline const char *AT_lbl (dw_attr_node *);
3667 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3668 static const char *get_AT_low_pc (dw_die_ref);
3669 static const char *get_AT_hi_pc (dw_die_ref);
3670 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3671 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3672 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3673 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3674 static bool is_cxx (void);
3675 static bool is_cxx (const_tree);
3676 static bool is_fortran (void);
3677 static bool is_ada (void);
3678 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3679 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3680 static void add_child_die (dw_die_ref, dw_die_ref);
3681 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3682 static dw_die_ref lookup_type_die (tree);
3683 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3684 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3685 static void equate_type_number_to_die (tree, dw_die_ref);
3686 static dw_die_ref lookup_decl_die (tree);
3687 static var_loc_list *lookup_decl_loc (const_tree);
3688 static void equate_decl_number_to_die (tree, dw_die_ref);
3689 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3690 static void print_spaces (FILE *);
3691 static void print_die (dw_die_ref, FILE *);
3692 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3693 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3694 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3695 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3696 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3697 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3698 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3699 struct md5_ctx *, int *);
3700 struct checksum_attributes;
3701 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3702 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3703 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3704 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3705 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3706 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3707 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3708 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3709 static int is_type_die (dw_die_ref);
3710 static int is_comdat_die (dw_die_ref);
3711 static inline bool is_template_instantiation (dw_die_ref);
3712 static int is_declaration_die (dw_die_ref);
3713 static int should_move_die_to_comdat (dw_die_ref);
3714 static dw_die_ref clone_as_declaration (dw_die_ref);
3715 static dw_die_ref clone_die (dw_die_ref);
3716 static dw_die_ref clone_tree (dw_die_ref);
3717 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3718 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3719 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3720 static dw_die_ref generate_skeleton (dw_die_ref);
3721 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3722 dw_die_ref,
3723 dw_die_ref);
3724 static void break_out_comdat_types (dw_die_ref);
3725 static void copy_decls_for_unworthy_types (dw_die_ref);
3726
3727 static void add_sibling_attributes (dw_die_ref);
3728 static void output_location_lists (dw_die_ref);
3729 static int constant_size (unsigned HOST_WIDE_INT);
3730 static unsigned long size_of_die (dw_die_ref);
3731 static void calc_die_sizes (dw_die_ref);
3732 static void calc_base_type_die_sizes (void);
3733 static void mark_dies (dw_die_ref);
3734 static void unmark_dies (dw_die_ref);
3735 static void unmark_all_dies (dw_die_ref);
3736 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3737 static unsigned long size_of_aranges (void);
3738 static enum dwarf_form value_format (dw_attr_node *);
3739 static void output_value_format (dw_attr_node *);
3740 static void output_abbrev_section (void);
3741 static void output_die_abbrevs (unsigned long, dw_die_ref);
3742 static void output_die (dw_die_ref);
3743 static void output_compilation_unit_header (enum dwarf_unit_type);
3744 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3745 static void output_comdat_type_unit (comdat_type_node *);
3746 static const char *dwarf2_name (tree, int);
3747 static void add_pubname (tree, dw_die_ref);
3748 static void add_enumerator_pubname (const char *, dw_die_ref);
3749 static void add_pubname_string (const char *, dw_die_ref);
3750 static void add_pubtype (tree, dw_die_ref);
3751 static void output_pubnames (vec<pubname_entry, va_gc> *);
3752 static void output_aranges (void);
3753 static unsigned int add_ranges (const_tree, bool = false);
3754 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3755 bool *, bool);
3756 static void output_ranges (void);
3757 static dw_line_info_table *new_line_info_table (void);
3758 static void output_line_info (bool);
3759 static void output_file_names (void);
3760 static dw_die_ref base_type_die (tree, bool);
3761 static int is_base_type (tree);
3762 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3763 static int decl_quals (const_tree);
3764 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3765 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3766 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3767 static int type_is_enum (const_tree);
3768 static unsigned int dbx_reg_number (const_rtx);
3769 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3770 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3771 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3772 enum var_init_status);
3773 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3774 enum var_init_status);
3775 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3776 enum var_init_status);
3777 static int is_based_loc (const_rtx);
3778 static bool resolve_one_addr (rtx *);
3779 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3780 enum var_init_status);
3781 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3782 enum var_init_status);
3783 struct loc_descr_context;
3784 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3785 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3786 static dw_loc_list_ref loc_list_from_tree (tree, int,
3787 struct loc_descr_context *);
3788 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3789 struct loc_descr_context *);
3790 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3791 static tree field_type (const_tree);
3792 static unsigned int simple_type_align_in_bits (const_tree);
3793 static unsigned int simple_decl_align_in_bits (const_tree);
3794 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3795 struct vlr_context;
3796 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3797 HOST_WIDE_INT *);
3798 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3799 dw_loc_list_ref);
3800 static void add_data_member_location_attribute (dw_die_ref, tree,
3801 struct vlr_context *);
3802 static bool add_const_value_attribute (dw_die_ref, rtx);
3803 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3804 static void insert_wide_int (const wide_int &, unsigned char *, int);
3805 static void insert_float (const_rtx, unsigned char *);
3806 static rtx rtl_for_decl_location (tree);
3807 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3808 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3809 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3810 static void add_name_attribute (dw_die_ref, const char *);
3811 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3812 static void add_comp_dir_attribute (dw_die_ref);
3813 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3814 struct loc_descr_context *);
3815 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3816 struct loc_descr_context *);
3817 static void add_subscript_info (dw_die_ref, tree, bool);
3818 static void add_byte_size_attribute (dw_die_ref, tree);
3819 static void add_alignment_attribute (dw_die_ref, tree);
3820 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3821 struct vlr_context *);
3822 static void add_bit_size_attribute (dw_die_ref, tree);
3823 static void add_prototyped_attribute (dw_die_ref, tree);
3824 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3825 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3826 static void add_src_coords_attributes (dw_die_ref, tree);
3827 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3828 static void add_discr_value (dw_die_ref, dw_discr_value *);
3829 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3830 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3831 static dw_die_ref scope_die_for (tree, dw_die_ref);
3832 static inline int local_scope_p (dw_die_ref);
3833 static inline int class_scope_p (dw_die_ref);
3834 static inline int class_or_namespace_scope_p (dw_die_ref);
3835 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3836 static void add_calling_convention_attribute (dw_die_ref, tree);
3837 static const char *type_tag (const_tree);
3838 static tree member_declared_type (const_tree);
3839 #if 0
3840 static const char *decl_start_label (tree);
3841 #endif
3842 static void gen_array_type_die (tree, dw_die_ref);
3843 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3844 #if 0
3845 static void gen_entry_point_die (tree, dw_die_ref);
3846 #endif
3847 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3848 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3849 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3850 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3851 static void gen_formal_types_die (tree, dw_die_ref);
3852 static void gen_subprogram_die (tree, dw_die_ref);
3853 static void gen_variable_die (tree, tree, dw_die_ref);
3854 static void gen_const_die (tree, dw_die_ref);
3855 static void gen_label_die (tree, dw_die_ref);
3856 static void gen_lexical_block_die (tree, dw_die_ref);
3857 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3858 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3859 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3860 static dw_die_ref gen_compile_unit_die (const char *);
3861 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3862 static void gen_member_die (tree, dw_die_ref);
3863 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3864 enum debug_info_usage);
3865 static void gen_subroutine_type_die (tree, dw_die_ref);
3866 static void gen_typedef_die (tree, dw_die_ref);
3867 static void gen_type_die (tree, dw_die_ref);
3868 static void gen_block_die (tree, dw_die_ref);
3869 static void decls_for_scope (tree, dw_die_ref);
3870 static bool is_naming_typedef_decl (const_tree);
3871 static inline dw_die_ref get_context_die (tree);
3872 static void gen_namespace_die (tree, dw_die_ref);
3873 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3874 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3875 static dw_die_ref force_decl_die (tree);
3876 static dw_die_ref force_type_die (tree);
3877 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3878 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3879 static struct dwarf_file_data * lookup_filename (const char *);
3880 static void retry_incomplete_types (void);
3881 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3882 static void gen_generic_params_dies (tree);
3883 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3884 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3885 static void splice_child_die (dw_die_ref, dw_die_ref);
3886 static int file_info_cmp (const void *, const void *);
3887 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3888 const char *, var_loc_view, const char *);
3889 static void output_loc_list (dw_loc_list_ref);
3890 static char *gen_internal_sym (const char *);
3891 static bool want_pubnames (void);
3892
3893 static void prune_unmark_dies (dw_die_ref);
3894 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3895 static void prune_unused_types_mark (dw_die_ref, int);
3896 static void prune_unused_types_walk (dw_die_ref);
3897 static void prune_unused_types_walk_attribs (dw_die_ref);
3898 static void prune_unused_types_prune (dw_die_ref);
3899 static void prune_unused_types (void);
3900 static int maybe_emit_file (struct dwarf_file_data *fd);
3901 static inline const char *AT_vms_delta1 (dw_attr_node *);
3902 static inline const char *AT_vms_delta2 (dw_attr_node *);
3903 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3904 const char *, const char *);
3905 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3906 static void gen_remaining_tmpl_value_param_die_attribute (void);
3907 static bool generic_type_p (tree);
3908 static void schedule_generic_params_dies_gen (tree t);
3909 static void gen_scheduled_generic_parms_dies (void);
3910 static void resolve_variable_values (void);
3911
3912 static const char *comp_dir_string (void);
3913
3914 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3915
3916 /* enum for tracking thread-local variables whose address is really an offset
3917 relative to the TLS pointer, which will need link-time relocation, but will
3918 not need relocation by the DWARF consumer. */
3919
3920 enum dtprel_bool
3921 {
3922 dtprel_false = 0,
3923 dtprel_true = 1
3924 };
3925
3926 /* Return the operator to use for an address of a variable. For dtprel_true, we
3927 use DW_OP_const*. For regular variables, which need both link-time
3928 relocation and consumer-level relocation (e.g., to account for shared objects
3929 loaded at a random address), we use DW_OP_addr*. */
3930
3931 static inline enum dwarf_location_atom
3932 dw_addr_op (enum dtprel_bool dtprel)
3933 {
3934 if (dtprel == dtprel_true)
3935 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3936 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3937 else
3938 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3939 }
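/* As a concrete illustration (not part of the original source): on a
   64-bit target without -gsplit-dwarf, a dtprel_true address is emitted
   with DW_OP_const8u (a constant TLS offset to be filled in at link
   time), while a regular variable address uses DW_OP_addr; with
   -gsplit-dwarf both cases switch to the indexed forms returned by
   dwarf_OP (DW_OP_constx / DW_OP_addrx, or their GNU_*_index
   equivalents for pre-DWARF-5 output).  */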
3940
3941 /* Return a pointer to a newly allocated address location description. If
3942 dwarf_split_debug_info is true, then record the address with the appropriate
3943 relocation. */
3944 static inline dw_loc_descr_ref
3945 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3946 {
3947 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3948
3949 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3950 ref->dw_loc_oprnd1.v.val_addr = addr;
3951 ref->dtprel = dtprel;
3952 if (dwarf_split_debug_info)
3953 ref->dw_loc_oprnd1.val_entry
3954 = add_addr_table_entry (addr,
3955 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3956 else
3957 ref->dw_loc_oprnd1.val_entry = NULL;
3958
3959 return ref;
3960 }
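/* An illustrative (hypothetical) caller of the helper above, describing
   the address of a non-TLS symbol; the symbol name "foo" and the
   surrounding context are invented for the example.  */
#if 0
  rtx sym = gen_rtx_SYMBOL_REF (Pmode, "foo");
  dw_loc_descr_ref descr = new_addr_loc_descr (sym, dtprel_false);
  /* With -gsplit-dwarf the address is routed through .debug_addr via
     add_addr_table_entry; otherwise it is emitted directly after
     DW_OP_addr.  */
#endif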
3961
3962 /* Section names used to hold DWARF debugging information. */
3963
3964 #ifndef DEBUG_INFO_SECTION
3965 #define DEBUG_INFO_SECTION ".debug_info"
3966 #endif
3967 #ifndef DEBUG_DWO_INFO_SECTION
3968 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3969 #endif
3970 #ifndef DEBUG_LTO_INFO_SECTION
3971 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3972 #endif
3973 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3974 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3975 #endif
3976 #ifndef DEBUG_ABBREV_SECTION
3977 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3978 #endif
3979 #ifndef DEBUG_LTO_ABBREV_SECTION
3980 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3981 #endif
3982 #ifndef DEBUG_DWO_ABBREV_SECTION
3983 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3984 #endif
3985 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3986 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3987 #endif
3988 #ifndef DEBUG_ARANGES_SECTION
3989 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3990 #endif
3991 #ifndef DEBUG_ADDR_SECTION
3992 #define DEBUG_ADDR_SECTION ".debug_addr"
3993 #endif
3994 #ifndef DEBUG_MACINFO_SECTION
3995 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3996 #endif
3997 #ifndef DEBUG_LTO_MACINFO_SECTION
3998 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3999 #endif
4000 #ifndef DEBUG_DWO_MACINFO_SECTION
4001 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4002 #endif
4003 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4004 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4005 #endif
4006 #ifndef DEBUG_MACRO_SECTION
4007 #define DEBUG_MACRO_SECTION ".debug_macro"
4008 #endif
4009 #ifndef DEBUG_LTO_MACRO_SECTION
4010 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4011 #endif
4012 #ifndef DEBUG_DWO_MACRO_SECTION
4013 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4014 #endif
4015 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4016 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4017 #endif
4018 #ifndef DEBUG_LINE_SECTION
4019 #define DEBUG_LINE_SECTION ".debug_line"
4020 #endif
4021 #ifndef DEBUG_LTO_LINE_SECTION
4022 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4023 #endif
4024 #ifndef DEBUG_DWO_LINE_SECTION
4025 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4026 #endif
4027 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4028 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4029 #endif
4030 #ifndef DEBUG_LOC_SECTION
4031 #define DEBUG_LOC_SECTION ".debug_loc"
4032 #endif
4033 #ifndef DEBUG_DWO_LOC_SECTION
4034 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4035 #endif
4036 #ifndef DEBUG_LOCLISTS_SECTION
4037 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4038 #endif
4039 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4040 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4041 #endif
4042 #ifndef DEBUG_PUBNAMES_SECTION
4043 #define DEBUG_PUBNAMES_SECTION \
4044 ((debug_generate_pub_sections == 2) \
4045 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4046 #endif
4047 #ifndef DEBUG_PUBTYPES_SECTION
4048 #define DEBUG_PUBTYPES_SECTION \
4049 ((debug_generate_pub_sections == 2) \
4050 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4051 #endif
4052 #ifndef DEBUG_STR_OFFSETS_SECTION
4053 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4054 #endif
4055 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4056 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4057 #endif
4058 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4059 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4060 #endif
4061 #ifndef DEBUG_STR_SECTION
4062 #define DEBUG_STR_SECTION ".debug_str"
4063 #endif
4064 #ifndef DEBUG_LTO_STR_SECTION
4065 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4066 #endif
4067 #ifndef DEBUG_STR_DWO_SECTION
4068 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4069 #endif
4070 #ifndef DEBUG_LTO_STR_DWO_SECTION
4071 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4072 #endif
4073 #ifndef DEBUG_RANGES_SECTION
4074 #define DEBUG_RANGES_SECTION ".debug_ranges"
4075 #endif
4076 #ifndef DEBUG_RNGLISTS_SECTION
4077 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4078 #endif
4079 #ifndef DEBUG_LINE_STR_SECTION
4080 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4081 #endif
4082 #ifndef DEBUG_LTO_LINE_STR_SECTION
4083 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4084 #endif
4085
4086 /* Standard ELF section names for compiled code and data. */
4087 #ifndef TEXT_SECTION_NAME
4088 #define TEXT_SECTION_NAME ".text"
4089 #endif
4090
4091 /* Section flags for .debug_str section. */
4092 #define DEBUG_STR_SECTION_FLAGS \
4093 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4094 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4095 : SECTION_DEBUG)
4096
4097 /* Section flags for .debug_str.dwo section. */
4098 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4099
4100 /* Attribute used to refer to the macro section. */
4101 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4102 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
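/* For example (illustration only): with -gdwarf-5 this selects
   DW_AT_macros; with -gdwarf-4 -gstrict-dwarf it falls back to the
   standard DW_AT_macro_info; with -gdwarf-4 alone the GNU extension
   DW_AT_GNU_macros is used.  */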
4103
4104 /* Labels we insert at the beginning of sections so we can reference
4105 them instead of the section names themselves. */
4106
4107 #ifndef TEXT_SECTION_LABEL
4108 #define TEXT_SECTION_LABEL "Ltext"
4109 #endif
4110 #ifndef COLD_TEXT_SECTION_LABEL
4111 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4112 #endif
4113 #ifndef DEBUG_LINE_SECTION_LABEL
4114 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4115 #endif
4116 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4117 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4118 #endif
4119 #ifndef DEBUG_INFO_SECTION_LABEL
4120 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4121 #endif
4122 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4123 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4124 #endif
4125 #ifndef DEBUG_ABBREV_SECTION_LABEL
4126 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4127 #endif
4128 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4129 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4130 #endif
4131 #ifndef DEBUG_ADDR_SECTION_LABEL
4132 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4133 #endif
4134 #ifndef DEBUG_LOC_SECTION_LABEL
4135 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4136 #endif
4137 #ifndef DEBUG_RANGES_SECTION_LABEL
4138 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4139 #endif
4140 #ifndef DEBUG_MACINFO_SECTION_LABEL
4141 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4142 #endif
4143 #ifndef DEBUG_MACRO_SECTION_LABEL
4144 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4145 #endif
4146 #define SKELETON_COMP_DIE_ABBREV 1
4147 #define SKELETON_TYPE_DIE_ABBREV 2
4148
4149 /* Definitions of defaults for formats and names of various special
4150 (artificial) labels which may be generated within this file (when the -g
4151 option is used and DWARF2_DEBUGGING_INFO is in effect).
4152 If necessary, these may be overridden from within the tm.h file, but
4153 typically, overriding these defaults is unnecessary. */
4154
4155 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4156 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4157 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4158 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4159 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4160 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4161 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4162 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4163 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4170
4171 #ifndef TEXT_END_LABEL
4172 #define TEXT_END_LABEL "Letext"
4173 #endif
4174 #ifndef COLD_END_LABEL
4175 #define COLD_END_LABEL "Letext_cold"
4176 #endif
4177 #ifndef BLOCK_BEGIN_LABEL
4178 #define BLOCK_BEGIN_LABEL "LBB"
4179 #endif
4180 #ifndef BLOCK_INLINE_ENTRY_LABEL
4181 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4182 #endif
4183 #ifndef BLOCK_END_LABEL
4184 #define BLOCK_END_LABEL "LBE"
4185 #endif
4186 #ifndef LINE_CODE_LABEL
4187 #define LINE_CODE_LABEL "LM"
4188 #endif
4189
4190 \f
4191 /* Return the root of the DIEs built for the current compilation unit. */
4192 static dw_die_ref
4193 comp_unit_die (void)
4194 {
4195 if (!single_comp_unit_die)
4196 single_comp_unit_die = gen_compile_unit_die (NULL);
4197 return single_comp_unit_die;
4198 }
4199
4200 /* We allow a language front-end to designate a function that is to be
4201 called to "demangle" any name before it is put into a DIE. */
4202
4203 static const char *(*demangle_name_func) (const char *);
4204
4205 void
4206 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4207 {
4208 demangle_name_func = func;
4209 }
4210
4211 /* Test if rtl node points to a pseudo register. */
4212
4213 static inline int
4214 is_pseudo_reg (const_rtx rtl)
4215 {
4216 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4217 || (GET_CODE (rtl) == SUBREG
4218 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4219 }
4220
4221 /* Return a reference to a type, with its const and volatile qualifiers
4222 removed. */
4223
4224 static inline tree
4225 type_main_variant (tree type)
4226 {
4227 type = TYPE_MAIN_VARIANT (type);
4228
4229 /* ??? There really should be only one main variant among any group of
4230 variants of a given type (and all of the MAIN_VARIANT values for all
4231 members of the group should point to that one type) but sometimes the C
4232 front-end messes this up for array types, so we work around that bug
4233 here. */
4234 if (TREE_CODE (type) == ARRAY_TYPE)
4235 while (type != TYPE_MAIN_VARIANT (type))
4236 type = TYPE_MAIN_VARIANT (type);
4237
4238 return type;
4239 }
4240
4241 /* Return nonzero if the given type node represents a tagged type. */
4242
4243 static inline int
4244 is_tagged_type (const_tree type)
4245 {
4246 enum tree_code code = TREE_CODE (type);
4247
4248 return (code == RECORD_TYPE || code == UNION_TYPE
4249 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4250 }
4251
4252 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4253
4254 static void
4255 get_ref_die_offset_label (char *label, dw_die_ref ref)
4256 {
4257 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4258 }
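/* For instance (illustrative only; the exact label spelling is
   target-dependent): with debug_info_section_label holding
   "Ldebug_info0" and a DIE at offset 74, the generated string is
   "Ldebug_info0+74".  */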
4259
4260 /* Return die_offset of a DIE reference to a base type. */
4261
4262 static unsigned long int
4263 get_base_type_offset (dw_die_ref ref)
4264 {
4265 if (ref->die_offset)
4266 return ref->die_offset;
4267 if (comp_unit_die ()->die_abbrev)
4268 {
4269 calc_base_type_die_sizes ();
4270 gcc_assert (ref->die_offset);
4271 }
4272 return ref->die_offset;
4273 }
4274
4275 /* Return die_offset of a DIE reference other than base type. */
4276
4277 static unsigned long int
4278 get_ref_die_offset (dw_die_ref ref)
4279 {
4280 gcc_assert (ref->die_offset);
4281 return ref->die_offset;
4282 }
4283
4284 /* Convert a DIE tag into its string name. */
4285
4286 static const char *
4287 dwarf_tag_name (unsigned int tag)
4288 {
4289 const char *name = get_DW_TAG_name (tag);
4290
4291 if (name != NULL)
4292 return name;
4293
4294 return "DW_TAG_<unknown>";
4295 }
4296
4297 /* Convert a DWARF attribute code into its string name. */
4298
4299 static const char *
4300 dwarf_attr_name (unsigned int attr)
4301 {
4302 const char *name;
4303
4304 switch (attr)
4305 {
4306 #if VMS_DEBUGGING_INFO
4307 case DW_AT_HP_prologue:
4308 return "DW_AT_HP_prologue";
4309 #else
4310 case DW_AT_MIPS_loop_unroll_factor:
4311 return "DW_AT_MIPS_loop_unroll_factor";
4312 #endif
4313
4314 #if VMS_DEBUGGING_INFO
4315 case DW_AT_HP_epilogue:
4316 return "DW_AT_HP_epilogue";
4317 #else
4318 case DW_AT_MIPS_stride:
4319 return "DW_AT_MIPS_stride";
4320 #endif
4321 }
4322
4323 name = get_DW_AT_name (attr);
4324
4325 if (name != NULL)
4326 return name;
4327
4328 return "DW_AT_<unknown>";
4329 }
4330
4331 /* Convert a DWARF value form code into its string name. */
4332
4333 static const char *
4334 dwarf_form_name (unsigned int form)
4335 {
4336 const char *name = get_DW_FORM_name (form);
4337
4338 if (name != NULL)
4339 return name;
4340
4341 return "DW_FORM_<unknown>";
4342 }
4343 \f
4344 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4345 instance of an inlined instance of a decl which is local to an inline
4346 function, so we have to trace all of the way back through the origin chain
4347 to find out what sort of node actually served as the original seed for the
4348 given block. */
4349
4350 static tree
4351 decl_ultimate_origin (const_tree decl)
4352 {
4353 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4354 return NULL_TREE;
4355
4356 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4357 we're trying to output the abstract instance of this function. */
4358 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4359 return NULL_TREE;
4360
4361 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4362 most distant ancestor, this should never happen. */
4363 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4364
4365 return DECL_ABSTRACT_ORIGIN (decl);
4366 }
4367
4368 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4369 of a virtual function may refer to a base class, so we check the 'this'
4370 parameter. */
4371
4372 static tree
4373 decl_class_context (tree decl)
4374 {
4375 tree context = NULL_TREE;
4376
4377 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4378 context = DECL_CONTEXT (decl);
4379 else
4380 context = TYPE_MAIN_VARIANT
4381 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4382
4383 if (context && !TYPE_P (context))
4384 context = NULL_TREE;
4385
4386 return context;
4387 }
4388 \f
4389 /* Add an attribute/value pair to a DIE. */
4390
4391 static inline void
4392 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4393 {
4394 /* Maybe this should be an assert? */
4395 if (die == NULL)
4396 return;
4397
4398 if (flag_checking)
4399 {
4400 /* Check we do not add duplicate attrs. Can't use get_AT here
4401 because that recurses to the specification/abstract origin DIE. */
4402 dw_attr_node *a;
4403 unsigned ix;
4404 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4405 gcc_assert (a->dw_attr != attr->dw_attr);
4406 }
4407
4408 vec_safe_reserve (die->die_attr, 1);
4409 vec_safe_push (die->die_attr, *attr);
4410 }
4411
4412 static inline enum dw_val_class
4413 AT_class (dw_attr_node *a)
4414 {
4415 return a->dw_attr_val.val_class;
4416 }
4417
4418 /* Return the index for any attribute that will be referenced with a
4419 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4420 indices are stored in dw_attr_val.v.val_str for reference counting
4421 pruning. */
4422
4423 static inline unsigned int
4424 AT_index (dw_attr_node *a)
4425 {
4426 if (AT_class (a) == dw_val_class_str)
4427 return a->dw_attr_val.v.val_str->index;
4428 else if (a->dw_attr_val.val_entry != NULL)
4429 return a->dw_attr_val.val_entry->index;
4430 return NOT_INDEXED;
4431 }
4432
4433 /* Add a flag value attribute to a DIE. */
4434
4435 static inline void
4436 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4437 {
4438 dw_attr_node attr;
4439
4440 attr.dw_attr = attr_kind;
4441 attr.dw_attr_val.val_class = dw_val_class_flag;
4442 attr.dw_attr_val.val_entry = NULL;
4443 attr.dw_attr_val.v.val_flag = flag;
4444 add_dwarf_attr (die, &attr);
4445 }
4446
4447 static inline unsigned
4448 AT_flag (dw_attr_node *a)
4449 {
4450 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4451 return a->dw_attr_val.v.val_flag;
4452 }
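/* A minimal (hypothetical) usage sketch for the flag helpers above:
   mark a DIE as describing an external entity and read the flag back
   through get_AT, declared earlier in this file.  */
#if 0
  add_AT_flag (die, DW_AT_external, 1);
  dw_attr_node *a = get_AT (die, DW_AT_external);
  gcc_assert (a && AT_flag (a) == 1);
#endif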
4453
4454 /* Add a signed integer attribute value to a DIE. */
4455
4456 static inline void
4457 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4458 {
4459 dw_attr_node attr;
4460
4461 attr.dw_attr = attr_kind;
4462 attr.dw_attr_val.val_class = dw_val_class_const;
4463 attr.dw_attr_val.val_entry = NULL;
4464 attr.dw_attr_val.v.val_int = int_val;
4465 add_dwarf_attr (die, &attr);
4466 }
4467
4468 static inline HOST_WIDE_INT
4469 AT_int (dw_attr_node *a)
4470 {
4471 gcc_assert (a && (AT_class (a) == dw_val_class_const
4472 || AT_class (a) == dw_val_class_const_implicit));
4473 return a->dw_attr_val.v.val_int;
4474 }
4475
4476 /* Add an unsigned integer attribute value to a DIE. */
4477
4478 static inline void
4479 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4480 unsigned HOST_WIDE_INT unsigned_val)
4481 {
4482 dw_attr_node attr;
4483
4484 attr.dw_attr = attr_kind;
4485 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4486 attr.dw_attr_val.val_entry = NULL;
4487 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4488 add_dwarf_attr (die, &attr);
4489 }
4490
4491 static inline unsigned HOST_WIDE_INT
4492 AT_unsigned (dw_attr_node *a)
4493 {
4494 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4495 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4496 return a->dw_attr_val.v.val_unsigned;
4497 }
4498
4499 /* Add an unsigned wide integer attribute value to a DIE. */
4500
4501 static inline void
4502 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4503 const wide_int& w)
4504 {
4505 dw_attr_node attr;
4506
4507 attr.dw_attr = attr_kind;
4508 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4509 attr.dw_attr_val.val_entry = NULL;
4510 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4511 *attr.dw_attr_val.v.val_wide = w;
4512 add_dwarf_attr (die, &attr);
4513 }
4514
4515 /* Add an unsigned double integer attribute value to a DIE. */
4516
4517 static inline void
4518 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4519 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4520 {
4521 dw_attr_node attr;
4522
4523 attr.dw_attr = attr_kind;
4524 attr.dw_attr_val.val_class = dw_val_class_const_double;
4525 attr.dw_attr_val.val_entry = NULL;
4526 attr.dw_attr_val.v.val_double.high = high;
4527 attr.dw_attr_val.v.val_double.low = low;
4528 add_dwarf_attr (die, &attr);
4529 }
4530
4531 /* Add a vector attribute value (e.g. a floating-point constant) to a DIE. */
4532
4533 static inline void
4534 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4535 unsigned int length, unsigned int elt_size, unsigned char *array)
4536 {
4537 dw_attr_node attr;
4538
4539 attr.dw_attr = attr_kind;
4540 attr.dw_attr_val.val_class = dw_val_class_vec;
4541 attr.dw_attr_val.val_entry = NULL;
4542 attr.dw_attr_val.v.val_vec.length = length;
4543 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4544 attr.dw_attr_val.v.val_vec.array = array;
4545 add_dwarf_attr (die, &attr);
4546 }
4547
4548 /* Add an 8-byte data attribute value to a DIE. */
4549
4550 static inline void
4551 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4552 unsigned char data8[8])
4553 {
4554 dw_attr_node attr;
4555
4556 attr.dw_attr = attr_kind;
4557 attr.dw_attr_val.val_class = dw_val_class_data8;
4558 attr.dw_attr_val.val_entry = NULL;
4559 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4560 add_dwarf_attr (die, &attr);
4561 }
4562
4563 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4564 dwarf_split_debug_info, address attributes in dies destined for the
4565 final executable have force_direct set to avoid using indexed
4566 references. */
4567
4568 static inline void
4569 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4570 bool force_direct)
4571 {
4572 dw_attr_node attr;
4573 char * lbl_id;
4574
4575 lbl_id = xstrdup (lbl_low);
4576 attr.dw_attr = DW_AT_low_pc;
4577 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4578 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4579 if (dwarf_split_debug_info && !force_direct)
4580 attr.dw_attr_val.val_entry
4581 = add_addr_table_entry (lbl_id, ate_kind_label);
4582 else
4583 attr.dw_attr_val.val_entry = NULL;
4584 add_dwarf_attr (die, &attr);
4585
4586 attr.dw_attr = DW_AT_high_pc;
4587 if (dwarf_version < 4)
4588 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4589 else
4590 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4591 lbl_id = xstrdup (lbl_high);
4592 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4593 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4594 && dwarf_split_debug_info && !force_direct)
4595 attr.dw_attr_val.val_entry
4596 = add_addr_table_entry (lbl_id, ate_kind_label);
4597 else
4598 attr.dw_attr_val.val_entry = NULL;
4599 add_dwarf_attr (die, &attr);
4600 }
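/* A hypothetical caller of the helper above, attaching code bounds to a
   subprogram DIE; the DIE and label strings are invented for the
   example.  For DWARF 4 and later the high-PC attribute is given class
   dw_val_class_high_pc and is later emitted as an offset from
   DW_AT_low_pc; earlier versions emit it as a plain label.  */
#if 0
  add_AT_low_high_pc (subr_die, "LFB42", "LFE42", false);
#endif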
4601
4602 /* Hash and equality functions for debug_str_hash. */
4603
4604 hashval_t
4605 indirect_string_hasher::hash (indirect_string_node *x)
4606 {
4607 return htab_hash_string (x->str);
4608 }
4609
4610 bool
4611 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4612 {
4613 return strcmp (x1->str, x2) == 0;
4614 }
4615
4616 /* Find or add STR in the given string hash table, bumping its refcount. */
4617
4618 static struct indirect_string_node *
4619 find_AT_string_in_table (const char *str,
4620 hash_table<indirect_string_hasher> *table)
4621 {
4622 struct indirect_string_node *node;
4623
4624 indirect_string_node **slot
4625 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4626 if (*slot == NULL)
4627 {
4628 node = ggc_cleared_alloc<indirect_string_node> ();
4629 node->str = ggc_strdup (str);
4630 *slot = node;
4631 }
4632 else
4633 node = *slot;
4634
4635 node->refcount++;
4636 return node;
4637 }
4638
4639 /* Find or add STR in the default indirect string hash table. */
4640
4641 static struct indirect_string_node *
4642 find_AT_string (const char *str)
4643 {
4644 if (! debug_str_hash)
4645 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4646
4647 return find_AT_string_in_table (str, debug_str_hash);
4648 }
4649
4650 /* Add a string attribute value to a DIE. */
4651
4652 static inline void
4653 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4654 {
4655 dw_attr_node attr;
4656 struct indirect_string_node *node;
4657
4658 node = find_AT_string (str);
4659
4660 attr.dw_attr = attr_kind;
4661 attr.dw_attr_val.val_class = dw_val_class_str;
4662 attr.dw_attr_val.val_entry = NULL;
4663 attr.dw_attr_val.v.val_str = node;
4664 add_dwarf_attr (die, &attr);
4665 }
4666
4667 static inline const char *
4668 AT_string (dw_attr_node *a)
4669 {
4670 gcc_assert (a && AT_class (a) == dw_val_class_str);
4671 return a->dw_attr_val.v.val_str->str;
4672 }
4673
4674 /* Call this function directly to force the string out-of-line, bypassing
4675 AT_string_form's logic for putting short strings inline in the DIE. */
4676
4677 static void
4678 set_indirect_string (struct indirect_string_node *node)
4679 {
4680 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4681 /* Already indirect is a no op. */
4682 if (node->form == DW_FORM_strp
4683 || node->form == DW_FORM_line_strp
4684 || node->form == dwarf_FORM (DW_FORM_strx))
4685 {
4686 gcc_assert (node->label);
4687 return;
4688 }
4689 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4690 ++dw2_string_counter;
4691 node->label = xstrdup (label);
4692
4693 if (!dwarf_split_debug_info)
4694 {
4695 node->form = DW_FORM_strp;
4696 node->index = NOT_INDEXED;
4697 }
4698 else
4699 {
4700 node->form = dwarf_FORM (DW_FORM_strx);
4701 node->index = NO_INDEX_ASSIGNED;
4702 }
4703 }
4704
4705 /* A helper function for dwarf2out_finish, called to reset indirect
4706 string decisions done for early LTO dwarf output before fat object
4707 dwarf output. */
4708
4709 int
4710 reset_indirect_string (indirect_string_node **h, void *)
4711 {
4712 struct indirect_string_node *node = *h;
4713 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4714 {
4715 free (node->label);
4716 node->label = NULL;
4717 node->form = (dwarf_form) 0;
4718 node->index = 0;
4719 }
4720 return 1;
4721 }
4722
4723 /* Find out whether a string should be output inline in the DIE
4724 or out-of-line in the .debug_str section. */
4725
4726 static enum dwarf_form
4727 find_string_form (struct indirect_string_node *node)
4728 {
4729 unsigned int len;
4730
4731 if (node->form)
4732 return node->form;
4733
4734 len = strlen (node->str) + 1;
4735
4736 /* If the string is no longer than the size of the reference, it is
4737 always better to put it inline. */
4738 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4739 return node->form = DW_FORM_string;
4740
4741 /* If we cannot expect the linker to merge strings in the .debug_str
4742 section, only put the string into .debug_str if it is worthwhile even
4743 within this single module. */
4744 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4745 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4746 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4747 return node->form = DW_FORM_string;
4748
4749 set_indirect_string (node);
4750
4751 return node->form;
4752 }
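/* Worked example of the heuristic above (illustrative numbers, assuming
   DWARF_OFFSET_SIZE == 4 and no mergeable .debug_str support): the
   string "abc" has len 4 counting the NUL, 4 <= 4, so it stays inline
   as DW_FORM_string.  A 15-character string (len 16) referenced once
   costs (16 - 4) * 1 = 12 <= 16 and also stays inline; referenced three
   times it costs 36 > 16, so it is moved out of line via
   set_indirect_string.  */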
4753
4754 /* Find out whether the string referenced from the attribute should be
4755 output inline in the DIE or out-of-line in the .debug_str section. */
4756
4757 static enum dwarf_form
4758 AT_string_form (dw_attr_node *a)
4759 {
4760 gcc_assert (a && AT_class (a) == dw_val_class_str);
4761 return find_string_form (a->dw_attr_val.v.val_str);
4762 }
4763
4764 /* Add a DIE reference attribute value to a DIE. */
4765
4766 static inline void
4767 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4768 {
4769 dw_attr_node attr;
4770 gcc_checking_assert (targ_die != NULL);
4771
4772 /* With LTO we can end up trying to reference something we didn't create
4773 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4774 if (targ_die == NULL)
4775 return;
4776
4777 attr.dw_attr = attr_kind;
4778 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4779 attr.dw_attr_val.val_entry = NULL;
4780 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4781 attr.dw_attr_val.v.val_die_ref.external = 0;
4782 add_dwarf_attr (die, &attr);
4783 }
4784
4785 /* Change DIE reference REF to point to NEW_DIE instead. */
4786
4787 static inline void
4788 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4789 {
4790 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4791 ref->dw_attr_val.v.val_die_ref.die = new_die;
4792 ref->dw_attr_val.v.val_die_ref.external = 0;
4793 }
4794
4795 /* Add an AT_specification attribute to a DIE, and also make the back
4796 pointer from the specification to the definition. */
4797
4798 static inline void
4799 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4800 {
4801 add_AT_die_ref (die, DW_AT_specification, targ_die);
4802 gcc_assert (!targ_die->die_definition);
4803 targ_die->die_definition = die;
4804 }
4805
4806 static inline dw_die_ref
4807 AT_ref (dw_attr_node *a)
4808 {
4809 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4810 return a->dw_attr_val.v.val_die_ref.die;
4811 }
4812
4813 static inline int
4814 AT_ref_external (dw_attr_node *a)
4815 {
4816 if (a && AT_class (a) == dw_val_class_die_ref)
4817 return a->dw_attr_val.v.val_die_ref.external;
4818
4819 return 0;
4820 }
4821
4822 static inline void
4823 set_AT_ref_external (dw_attr_node *a, int i)
4824 {
4825 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4826 a->dw_attr_val.v.val_die_ref.external = i;
4827 }
4828
4829 /* Add an FDE reference attribute value to a DIE. */
4830
4831 static inline void
4832 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4833 {
4834 dw_attr_node attr;
4835
4836 attr.dw_attr = attr_kind;
4837 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4838 attr.dw_attr_val.val_entry = NULL;
4839 attr.dw_attr_val.v.val_fde_index = targ_fde;
4840 add_dwarf_attr (die, &attr);
4841 }
4842
4843 /* Add a location description attribute value to a DIE. */
4844
4845 static inline void
4846 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4847 {
4848 dw_attr_node attr;
4849
4850 attr.dw_attr = attr_kind;
4851 attr.dw_attr_val.val_class = dw_val_class_loc;
4852 attr.dw_attr_val.val_entry = NULL;
4853 attr.dw_attr_val.v.val_loc = loc;
4854 add_dwarf_attr (die, &attr);
4855 }
4856
4857 static inline dw_loc_descr_ref
4858 AT_loc (dw_attr_node *a)
4859 {
4860 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4861 return a->dw_attr_val.v.val_loc;
4862 }
4863
4864 static inline void
4865 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4866 {
4867 dw_attr_node attr;
4868
4869 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4870 return;
4871
4872 attr.dw_attr = attr_kind;
4873 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4874 attr.dw_attr_val.val_entry = NULL;
4875 attr.dw_attr_val.v.val_loc_list = loc_list;
4876 add_dwarf_attr (die, &attr);
4877 have_location_lists = true;
4878 }
4879
4880 static inline dw_loc_list_ref
4881 AT_loc_list (dw_attr_node *a)
4882 {
4883 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4884 return a->dw_attr_val.v.val_loc_list;
4885 }
4886
4887 /* Add a view list attribute to DIE, which must already have a DW_AT_location
4888 attribute, because the view list complements that location list. */
4889
4890 static inline void
4891 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4892 {
4893 dw_attr_node attr;
4894
4895 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4896 return;
4897
4898 attr.dw_attr = attr_kind;
4899 attr.dw_attr_val.val_class = dw_val_class_view_list;
4900 attr.dw_attr_val.val_entry = NULL;
4901 attr.dw_attr_val.v.val_view_list = die;
4902 add_dwarf_attr (die, &attr);
4903 gcc_checking_assert (get_AT (die, DW_AT_location));
4904 gcc_assert (have_location_lists);
4905 }
4906
4907 /* Return a pointer to the location list referenced by the attribute.
4908 If the named attribute is a view list, look up the corresponding
4909 DW_AT_location attribute and return its location list. */
4910
4911 static inline dw_loc_list_ref *
4912 AT_loc_list_ptr (dw_attr_node *a)
4913 {
4914 gcc_assert (a);
4915 switch (AT_class (a))
4916 {
4917 case dw_val_class_loc_list:
4918 return &a->dw_attr_val.v.val_loc_list;
4919 case dw_val_class_view_list:
4920 {
4921 dw_attr_node *l;
4922 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4923 if (!l)
4924 return NULL;
4925 gcc_checking_assert (l + 1 == a);
4926 return AT_loc_list_ptr (l);
4927 }
4928 default:
4929 gcc_unreachable ();
4930 }
4931 }
4932
4933 /* Return the location attribute value associated with a view list
4934 attribute value. */
4935
4936 static inline dw_val_node *
4937 view_list_to_loc_list_val_node (dw_val_node *val)
4938 {
4939 gcc_assert (val->val_class == dw_val_class_view_list);
4940 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4941 if (!loc)
4942 return NULL;
4943 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4944 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4945 return &loc->dw_attr_val;
4946 }
4947
4948 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4949 {
4950 static hashval_t hash (addr_table_entry *);
4951 static bool equal (addr_table_entry *, addr_table_entry *);
4952 };
4953
4954 /* Table of entries into the .debug_addr section. */
4955
4956 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4957
4958 /* Hash an addr_table_entry. */
4959
4960 hashval_t
4961 addr_hasher::hash (addr_table_entry *a)
4962 {
4963 inchash::hash hstate;
4964 switch (a->kind)
4965 {
4966 case ate_kind_rtx:
4967 hstate.add_int (0);
4968 break;
4969 case ate_kind_rtx_dtprel:
4970 hstate.add_int (1);
4971 break;
4972 case ate_kind_label:
4973 return htab_hash_string (a->addr.label);
4974 default:
4975 gcc_unreachable ();
4976 }
4977 inchash::add_rtx (a->addr.rtl, hstate);
4978 return hstate.end ();
4979 }
4980
4981 /* Determine equality for two addr_table_entries. */
4982
4983 bool
4984 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4985 {
4986 if (a1->kind != a2->kind)
4987 return 0;
4988 switch (a1->kind)
4989 {
4990 case ate_kind_rtx:
4991 case ate_kind_rtx_dtprel:
4992 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4993 case ate_kind_label:
4994 return strcmp (a1->addr.label, a2->addr.label) == 0;
4995 default:
4996 gcc_unreachable ();
4997 }
4998 }
4999
5000 /* Initialize an addr_table_entry. */
5001
5002 void
5003 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
5004 {
5005 e->kind = kind;
5006 switch (kind)
5007 {
5008 case ate_kind_rtx:
5009 case ate_kind_rtx_dtprel:
5010 e->addr.rtl = (rtx) addr;
5011 break;
5012 case ate_kind_label:
5013 e->addr.label = (char *) addr;
5014 break;
5015 }
5016 e->refcount = 0;
5017 e->index = NO_INDEX_ASSIGNED;
5018 }
5019
5020 /* Find or add an address table entry for ADDR of kind KIND. Defer setting
5021 an index until output time. */
5022
5023 static addr_table_entry *
5024 add_addr_table_entry (void *addr, enum ate_kind kind)
5025 {
5026 addr_table_entry *node;
5027 addr_table_entry finder;
5028
5029 gcc_assert (dwarf_split_debug_info);
5030 if (! addr_index_table)
5031 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5032 init_addr_table_entry (&finder, kind, addr);
5033 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5034
5035 if (*slot == HTAB_EMPTY_ENTRY)
5036 {
5037 node = ggc_cleared_alloc<addr_table_entry> ();
5038 init_addr_table_entry (node, kind, addr);
5039 *slot = node;
5040 }
5041 else
5042 node = *slot;
5043
5044 node->refcount++;
5045 return node;
5046 }
5047
5048 /* Remove an entry from the addr table by decrementing its refcount.
5049 Strictly, decrementing the refcount would be enough, but the
5050 assertion that the entry is actually in the table has found
5051 bugs. */
5052
5053 static void
5054 remove_addr_table_entry (addr_table_entry *entry)
5055 {
5056 gcc_assert (dwarf_split_debug_info && addr_index_table);
5057 /* After an index is assigned, the table is frozen. */
5058 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5059 entry->refcount--;
5060 }
5061
5062 /* Given a location list, remove all addresses it refers to from the
5063 address_table. */
5064
5065 static void
5066 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5067 {
5068 for (; descr; descr = descr->dw_loc_next)
5069 if (descr->dw_loc_oprnd1.val_entry != NULL)
5070 {
5071 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5072 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5073 }
5074 }
5075
5076 /* A helper function for dwarf2out_finish called through
5077 htab_traverse. Assign an addr_table_entry its index. All entries
5078 must be collected into the table when this function is called,
5079 because the indexing code relies on htab_traverse to traverse nodes
5080 in the same order for each run. */
5081
5082 int
5083 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5084 {
5085 addr_table_entry *node = *h;
5086
5087 /* Don't index unreferenced nodes. */
5088 if (node->refcount == 0)
5089 return 1;
5090
5091 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5092 node->index = *index;
5093 *index += 1;
5094
5095 return 1;
5096 }
5097
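/* Editorial note: a minimal sketch (not upstream code) of how the
   .debug_addr table above is typically driven under -gsplit-dwarf;
   "lbl" stands for some hypothetical label string:

     addr_table_entry *e = add_addr_table_entry (lbl, ate_kind_label);
     ...
     remove_addr_table_entry (e);   // if the referencing attribute is dropped

   At output time dwarf2out_finish assigns the final indices in a stable
   traversal order:

     unsigned int index = 0;
     addr_index_table->traverse_noresize
       <unsigned int *, index_addr_table_entry> (&index);

   after which the table is frozen (see the assertion in
   remove_addr_table_entry).  */
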
5098 /* Add an address constant attribute value to a DIE. When using
5099 dwarf_split_debug_info, address attributes in dies destined for the
5100 final executable should be direct references--setting the parameter
5101 force_direct ensures this behavior. */
5102
5103 static inline void
5104 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5105 bool force_direct)
5106 {
5107 dw_attr_node attr;
5108
5109 attr.dw_attr = attr_kind;
5110 attr.dw_attr_val.val_class = dw_val_class_addr;
5111 attr.dw_attr_val.v.val_addr = addr;
5112 if (dwarf_split_debug_info && !force_direct)
5113 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5114 else
5115 attr.dw_attr_val.val_entry = NULL;
5116 add_dwarf_attr (die, &attr);
5117 }
5118
5119 /* Get the RTX from an address DIE attribute.  */
5120
5121 static inline rtx
5122 AT_addr (dw_attr_node *a)
5123 {
5124 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5125 return a->dw_attr_val.v.val_addr;
5126 }
5127
5128 /* Add a file attribute value to a DIE. */
5129
5130 static inline void
5131 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5132 struct dwarf_file_data *fd)
5133 {
5134 dw_attr_node attr;
5135
5136 attr.dw_attr = attr_kind;
5137 attr.dw_attr_val.val_class = dw_val_class_file;
5138 attr.dw_attr_val.val_entry = NULL;
5139 attr.dw_attr_val.v.val_file = fd;
5140 add_dwarf_attr (die, &attr);
5141 }
5142
5143 /* Get the dwarf_file_data from a file DIE attribute. */
5144
5145 static inline struct dwarf_file_data *
5146 AT_file (dw_attr_node *a)
5147 {
5148 gcc_assert (a && (AT_class (a) == dw_val_class_file
5149 || AT_class (a) == dw_val_class_file_implicit));
5150 return a->dw_attr_val.v.val_file;
5151 }
5152
5153 /* Add a vms delta attribute value to a DIE. */
5154
5155 static inline void
5156 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5157 const char *lbl1, const char *lbl2)
5158 {
5159 dw_attr_node attr;
5160
5161 attr.dw_attr = attr_kind;
5162 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5163 attr.dw_attr_val.val_entry = NULL;
5164 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5165 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5166 add_dwarf_attr (die, &attr);
5167 }
5168
5169 /* Add a symbolic view identifier attribute value to a DIE. */
5170
5171 static inline void
5172 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5173 const char *view_label)
5174 {
5175 dw_attr_node attr;
5176
5177 attr.dw_attr = attr_kind;
5178 attr.dw_attr_val.val_class = dw_val_class_symview;
5179 attr.dw_attr_val.val_entry = NULL;
5180 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5181 add_dwarf_attr (die, &attr);
5182 }
5183
5184 /* Add a label identifier attribute value to a DIE. */
5185
5186 static inline void
5187 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5188 const char *lbl_id)
5189 {
5190 dw_attr_node attr;
5191
5192 attr.dw_attr = attr_kind;
5193 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5194 attr.dw_attr_val.val_entry = NULL;
5195 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5196 if (dwarf_split_debug_info)
5197 attr.dw_attr_val.val_entry
5198 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5199 ate_kind_label);
5200 add_dwarf_attr (die, &attr);
5201 }
5202
5203 /* Add a section offset attribute value to a DIE, an offset into the
5204 debug_line section. */
5205
5206 static inline void
5207 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5208 const char *label)
5209 {
5210 dw_attr_node attr;
5211
5212 attr.dw_attr = attr_kind;
5213 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5214 attr.dw_attr_val.val_entry = NULL;
5215 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5216 add_dwarf_attr (die, &attr);
5217 }
5218
5219 /* Add a section offset attribute value to a DIE, an offset into the
5220 debug_loclists section. */
5221
5222 static inline void
5223 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5224 const char *label)
5225 {
5226 dw_attr_node attr;
5227
5228 attr.dw_attr = attr_kind;
5229 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5230 attr.dw_attr_val.val_entry = NULL;
5231 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5232 add_dwarf_attr (die, &attr);
5233 }
5234
5235 /* Add a section offset attribute value to a DIE, an offset into the
5236 debug_macinfo section. */
5237
5238 static inline void
5239 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5240 const char *label)
5241 {
5242 dw_attr_node attr;
5243
5244 attr.dw_attr = attr_kind;
5245 attr.dw_attr_val.val_class = dw_val_class_macptr;
5246 attr.dw_attr_val.val_entry = NULL;
5247 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5248 add_dwarf_attr (die, &attr);
5249 }
5250
5251 /* Add an offset attribute value to a DIE. */
5252
5253 static inline void
5254 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5255 unsigned HOST_WIDE_INT offset)
5256 {
5257 dw_attr_node attr;
5258
5259 attr.dw_attr = attr_kind;
5260 attr.dw_attr_val.val_class = dw_val_class_offset;
5261 attr.dw_attr_val.val_entry = NULL;
5262 attr.dw_attr_val.v.val_offset = offset;
5263 add_dwarf_attr (die, &attr);
5264 }
5265
5266 /* Add a range_list attribute value to a DIE. When using
5267 dwarf_split_debug_info, address attributes in dies destined for the
5268 final executable should be direct references--setting the parameter
5269 force_direct ensures this behavior. */
5270
5271 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5272 #define RELOCATED_OFFSET (NULL)
5273
5274 static void
5275 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5276 long unsigned int offset, bool force_direct)
5277 {
5278 dw_attr_node attr;
5279
5280 attr.dw_attr = attr_kind;
5281 attr.dw_attr_val.val_class = dw_val_class_range_list;
5282 /* For the range_list attribute, use val_entry to store whether the
5283 offset should follow split-debug-info or normal semantics. This
5284 value is read in output_range_list_offset. */
5285 if (dwarf_split_debug_info && !force_direct)
5286 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5287 else
5288 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5289 attr.dw_attr_val.v.val_offset = offset;
5290 add_dwarf_attr (die, &attr);
5291 }
5292
5293 /* Return the start label of a delta attribute. */
5294
5295 static inline const char *
5296 AT_vms_delta1 (dw_attr_node *a)
5297 {
5298 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5299 return a->dw_attr_val.v.val_vms_delta.lbl1;
5300 }
5301
5302 /* Return the end label of a delta attribute. */
5303
5304 static inline const char *
5305 AT_vms_delta2 (dw_attr_node *a)
5306 {
5307 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5308 return a->dw_attr_val.v.val_vms_delta.lbl2;
5309 }
5310
5311 static inline const char *
5312 AT_lbl (dw_attr_node *a)
5313 {
5314 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5315 || AT_class (a) == dw_val_class_lineptr
5316 || AT_class (a) == dw_val_class_macptr
5317 || AT_class (a) == dw_val_class_loclistsptr
5318 || AT_class (a) == dw_val_class_high_pc));
5319 return a->dw_attr_val.v.val_lbl_id;
5320 }
5321
5322 /* Get the attribute of type ATTR_KIND from DIE, following any DW_AT_specification or DW_AT_abstract_origin reference if the attribute is not present on DIE itself.  */
5323
5324 static dw_attr_node *
5325 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5326 {
5327 dw_attr_node *a;
5328 unsigned ix;
5329 dw_die_ref spec = NULL;
5330
5331 if (! die)
5332 return NULL;
5333
5334 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5335 if (a->dw_attr == attr_kind)
5336 return a;
5337 else if (a->dw_attr == DW_AT_specification
5338 || a->dw_attr == DW_AT_abstract_origin)
5339 spec = AT_ref (a);
5340
5341 if (spec)
5342 return get_AT (spec, attr_kind);
5343
5344 return NULL;
5345 }
5346
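/* Editorial note: a hypothetical illustration of the indirection in
   get_AT above.  For a member function defined out of class, the
   definition DIE usually carries only a DW_AT_specification pointing
   at the in-class declaration DIE, so

     get_AT (definition_die, DW_AT_name)

   finds no DW_AT_name on the definition itself and instead returns the
   one from the declaration reached through DW_AT_specification.  */
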
5347 /* Returns the parent of the declaration of DIE. */
5348
5349 static dw_die_ref
5350 get_die_parent (dw_die_ref die)
5351 {
5352 dw_die_ref t;
5353
5354 if (!die)
5355 return NULL;
5356
5357 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5358 || (t = get_AT_ref (die, DW_AT_specification)))
5359 die = t;
5360
5361 return die->die_parent;
5362 }
5363
5364 /* Return the "low pc" attribute value, typically associated with a subprogram
5365 DIE. Return null if the "low pc" attribute is either not present, or if it
5366 cannot be represented as an assembler label identifier. */
5367
5368 static inline const char *
5369 get_AT_low_pc (dw_die_ref die)
5370 {
5371 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5372
5373 return a ? AT_lbl (a) : NULL;
5374 }
5375
5376 /* Return the "high pc" attribute value, typically associated with a subprogram
5377 DIE. Return null if the "high pc" attribute is either not present, or if it
5378 cannot be represented as an assembler label identifier. */
5379
5380 static inline const char *
5381 get_AT_hi_pc (dw_die_ref die)
5382 {
5383 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5384
5385 return a ? AT_lbl (a) : NULL;
5386 }
5387
5388 /* Return the value of the string attribute designated by ATTR_KIND, or
5389 NULL if it is not present. */
5390
5391 static inline const char *
5392 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5393 {
5394 dw_attr_node *a = get_AT (die, attr_kind);
5395
5396 return a ? AT_string (a) : NULL;
5397 }
5398
5399 /* Return the value of the flag attribute designated by ATTR_KIND, or -1
5400 if it is not present. */
5401
5402 static inline int
5403 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5404 {
5405 dw_attr_node *a = get_AT (die, attr_kind);
5406
5407 return a ? AT_flag (a) : 0;
5408 }
5409
5410 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5411 if it is not present. */
5412
5413 static inline unsigned
5414 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5415 {
5416 dw_attr_node *a = get_AT (die, attr_kind);
5417
5418 return a ? AT_unsigned (a) : 0;
5419 }
5420
5421 static inline dw_die_ref
5422 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5423 {
5424 dw_attr_node *a = get_AT (die, attr_kind);
5425
5426 return a ? AT_ref (a) : NULL;
5427 }
5428
5429 static inline struct dwarf_file_data *
5430 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5431 {
5432 dw_attr_node *a = get_AT (die, attr_kind);
5433
5434 return a ? AT_file (a) : NULL;
5435 }
5436
5437 /* Return TRUE if the language is C++. */
5438
5439 static inline bool
5440 is_cxx (void)
5441 {
5442 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5443
5444 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5445 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5446 }
5447
5448 /* Return TRUE if DECL was created by the C++ frontend. */
5449
5450 static bool
5451 is_cxx (const_tree decl)
5452 {
5453 if (in_lto_p)
5454 {
5455 const_tree context = get_ultimate_context (decl);
5456 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5457 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5458 }
5459 return is_cxx ();
5460 }
5461
5462 /* Return TRUE if the language is Fortran. */
5463
5464 static inline bool
5465 is_fortran (void)
5466 {
5467 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5468
5469 return (lang == DW_LANG_Fortran77
5470 || lang == DW_LANG_Fortran90
5471 || lang == DW_LANG_Fortran95
5472 || lang == DW_LANG_Fortran03
5473 || lang == DW_LANG_Fortran08);
5474 }
5475
5476 static inline bool
5477 is_fortran (const_tree decl)
5478 {
5479 if (in_lto_p)
5480 {
5481 const_tree context = get_ultimate_context (decl);
5482 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5483 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5484 "GNU Fortran", 11) == 0
5485 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5486 "GNU F77") == 0);
5487 }
5488 return is_fortran ();
5489 }
5490
5491 /* Return TRUE if the language is Ada. */
5492
5493 static inline bool
5494 is_ada (void)
5495 {
5496 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5497
5498 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5499 }
5500
5501 /* Remove the specified attribute if present. Return TRUE if removal
5502 was successful. */
5503
5504 static bool
5505 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5506 {
5507 dw_attr_node *a;
5508 unsigned ix;
5509
5510 if (! die)
5511 return false;
5512
5513 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5514 if (a->dw_attr == attr_kind)
5515 {
5516 if (AT_class (a) == dw_val_class_str)
5517 if (a->dw_attr_val.v.val_str->refcount)
5518 a->dw_attr_val.v.val_str->refcount--;
5519
5520 /* vec::ordered_remove should help reduce the number of abbrevs
5521 that are needed. */
5522 die->die_attr->ordered_remove (ix);
5523 return true;
5524 }
5525 return false;
5526 }
5527
5528 /* Remove CHILD from its parent. PREV must have the property that
5529 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5530
5531 static void
5532 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5533 {
5534 gcc_assert (child->die_parent == prev->die_parent);
5535 gcc_assert (prev->die_sib == child);
5536 if (prev == child)
5537 {
5538 gcc_assert (child->die_parent->die_child == child);
5539 prev = NULL;
5540 }
5541 else
5542 prev->die_sib = child->die_sib;
5543 if (child->die_parent->die_child == child)
5544 child->die_parent->die_child = prev;
5545 child->die_sib = NULL;
5546 }
5547
5548 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5549 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5550
5551 static void
5552 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5553 {
5554 dw_die_ref parent = old_child->die_parent;
5555
5556 gcc_assert (parent == prev->die_parent);
5557 gcc_assert (prev->die_sib == old_child);
5558
5559 new_child->die_parent = parent;
5560 if (prev == old_child)
5561 {
5562 gcc_assert (parent->die_child == old_child);
5563 new_child->die_sib = new_child;
5564 }
5565 else
5566 {
5567 prev->die_sib = new_child;
5568 new_child->die_sib = old_child->die_sib;
5569 }
5570 if (old_child->die_parent->die_child == old_child)
5571 old_child->die_parent->die_child = new_child;
5572 old_child->die_sib = NULL;
5573 }
5574
5575 /* Move all children from OLD_PARENT to NEW_PARENT. */
5576
5577 static void
5578 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5579 {
5580 dw_die_ref c;
5581 new_parent->die_child = old_parent->die_child;
5582 old_parent->die_child = NULL;
5583 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5584 }
5585
5586 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
5587 matches TAG. */
5588
5589 static void
5590 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5591 {
5592 dw_die_ref c;
5593
5594 c = die->die_child;
5595 if (c) do {
5596 dw_die_ref prev = c;
5597 c = c->die_sib;
5598 while (c->die_tag == tag)
5599 {
5600 remove_child_with_prev (c, prev);
5601 c->die_parent = NULL;
5602 /* Might have removed every child. */
5603 if (die->die_child == NULL)
5604 return;
5605 c = prev->die_sib;
5606 }
5607 } while (c != die->die_child);
5608 }
5609
5610 /* Add a CHILD_DIE as the last child of DIE. */
5611
5612 static void
5613 add_child_die (dw_die_ref die, dw_die_ref child_die)
5614 {
5615 /* FIXME this should probably be an assert. */
5616 if (! die || ! child_die)
5617 return;
5618 gcc_assert (die != child_die);
5619
5620 child_die->die_parent = die;
5621 if (die->die_child)
5622 {
5623 child_die->die_sib = die->die_child->die_sib;
5624 die->die_child->die_sib = child_die;
5625 }
5626 else
5627 child_die->die_sib = child_die;
5628 die->die_child = child_die;
5629 }
5630
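/* Editorial note: the child list manipulated above is a circular,
   singly-linked list in which DIE->die_child points to the LAST child
   and DIE->die_child->die_sib points back to the first one.  After
   adding children A and then B to P, the invariant is

     P->die_child == B,  B->die_sib == A,  A->die_sib == B

   so FOR_EACH_CHILD starts at P->die_child->die_sib (the first child)
   and stops once it wraps around to it again.  */
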
5631 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5632
5633 static void
5634 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5635 dw_die_ref after_die)
5636 {
5637 gcc_assert (die
5638 && child_die
5639 && after_die
5640 && die->die_child
5641 && die != child_die);
5642
5643 child_die->die_parent = die;
5644 child_die->die_sib = after_die->die_sib;
5645 after_die->die_sib = child_die;
5646 if (die->die_child == after_die)
5647 die->die_child = child_die;
5648 }
5649
5650 /* Unassociate CHILD from its parent, and make its parent be
5651 NEW_PARENT. */
5652
5653 static void
5654 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5655 {
5656 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5657 if (p->die_sib == child)
5658 {
5659 remove_child_with_prev (child, p);
5660 break;
5661 }
5662 add_child_die (new_parent, child);
5663 }
5664
5665 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5666 is the specification, to the end of PARENT's list of children.
5667 This is done by removing and re-adding it. */
5668
5669 static void
5670 splice_child_die (dw_die_ref parent, dw_die_ref child)
5671 {
5672 /* We want the declaration DIE from inside the class, not the
5673 specification DIE at toplevel. */
5674 if (child->die_parent != parent)
5675 {
5676 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5677
5678 if (tmp)
5679 child = tmp;
5680 }
5681
5682 gcc_assert (child->die_parent == parent
5683 || (child->die_parent
5684 == get_AT_ref (parent, DW_AT_specification)));
5685
5686 reparent_child (child, parent);
5687 }
5688
5689 /* Create and return a new die with TAG_VALUE as tag. */
5690
5691 static inline dw_die_ref
5692 new_die_raw (enum dwarf_tag tag_value)
5693 {
5694 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5695 die->die_tag = tag_value;
5696 return die;
5697 }
5698
5699 /* Create and return a new die with a parent of PARENT_DIE. If
5700 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5701 associated tree T must be supplied to determine parenthood
5702 later. */
5703
5704 static inline dw_die_ref
5705 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5706 {
5707 dw_die_ref die = new_die_raw (tag_value);
5708
5709 if (parent_die != NULL)
5710 add_child_die (parent_die, die);
5711 else
5712 {
5713 limbo_die_node *limbo_node;
5714
5715 /* No DIEs created after early dwarf should end up in limbo,
5716 because the limbo list should not persist past LTO
5717 streaming. */
5718 if (tag_value != DW_TAG_compile_unit
5719 /* These are allowed because they're generated while
5720 breaking out COMDAT units late. */
5721 && tag_value != DW_TAG_type_unit
5722 && tag_value != DW_TAG_skeleton_unit
5723 && !early_dwarf
5724 /* Allow nested functions to live in limbo because they will
5725 only temporarily live there, as decls_for_scope will fix
5726 them up. */
5727 && (TREE_CODE (t) != FUNCTION_DECL
5728 || !decl_function_context (t))
5729 /* Same as nested functions above but for types. Types that
5730 are local to a function will be fixed in
5731 decls_for_scope. */
5732 && (!RECORD_OR_UNION_TYPE_P (t)
5733 || !TYPE_CONTEXT (t)
5734 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5735 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5736 especially in the ltrans stage, but once we implement LTO
5737 dwarf streaming, we should remove this exception. */
5738 && !in_lto_p)
5739 {
5740 fprintf (stderr, "symbol ended up in limbo too late:");
5741 debug_generic_stmt (t);
5742 gcc_unreachable ();
5743 }
5744
5745 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5746 limbo_node->die = die;
5747 limbo_node->created_for = t;
5748 limbo_node->next = limbo_die_list;
5749 limbo_die_list = limbo_node;
5750 }
5751
5752 return die;
5753 }
5754
5755 /* Return the DIE associated with the given type specifier. */
5756
5757 static inline dw_die_ref
5758 lookup_type_die (tree type)
5759 {
5760 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5761 if (die && die->removed)
5762 {
5763 TYPE_SYMTAB_DIE (type) = NULL;
5764 return NULL;
5765 }
5766 return die;
5767 }
5768
5769 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5770 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5771 anonymous type instead of the one of the naming typedef.  */
5772
5773 static inline dw_die_ref
5774 strip_naming_typedef (tree type, dw_die_ref type_die)
5775 {
5776 if (type
5777 && TREE_CODE (type) == RECORD_TYPE
5778 && type_die
5779 && type_die->die_tag == DW_TAG_typedef
5780 && is_naming_typedef_decl (TYPE_NAME (type)))
5781 type_die = get_AT_ref (type_die, DW_AT_type);
5782 return type_die;
5783 }
5784
5785 /* Like lookup_type_die, but if TYPE is an anonymous type named by a
5786 typedef[1], return the DIE of the anonymous type instead of the one
5787 of the naming typedef.  This is because gen_typedef_die equates the
5788 anonymous struct named by the typedef with the DIE of the naming
5789 typedef.  So by default, lookup_type_die on an anonymous struct
5790 yields the DIE of the naming typedef.
5791
5792 [1]: Read the comment of is_naming_typedef_decl to learn about what
5793 a naming typedef is. */
5794
5795 static inline dw_die_ref
5796 lookup_type_die_strip_naming_typedef (tree type)
5797 {
5798 dw_die_ref die = lookup_type_die (type);
5799 return strip_naming_typedef (type, die);
5800 }
5801
5802 /* Equate a DIE to a given type specifier. */
5803
5804 static inline void
5805 equate_type_number_to_die (tree type, dw_die_ref type_die)
5806 {
5807 TYPE_SYMTAB_DIE (type) = type_die;
5808 }
5809
5810 /* Returns a hash value for X (which really is a die_struct). */
5811
5812 inline hashval_t
5813 decl_die_hasher::hash (die_node *x)
5814 {
5815 return (hashval_t) x->decl_id;
5816 }
5817
5818 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5819
5820 inline bool
5821 decl_die_hasher::equal (die_node *x, tree y)
5822 {
5823 return (x->decl_id == DECL_UID (y));
5824 }
5825
5826 /* Return the DIE associated with a given declaration. */
5827
5828 static inline dw_die_ref
5829 lookup_decl_die (tree decl)
5830 {
5831 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5832 NO_INSERT);
5833 if (!die)
5834 return NULL;
5835 if ((*die)->removed)
5836 {
5837 decl_die_table->clear_slot (die);
5838 return NULL;
5839 }
5840 return *die;
5841 }
5842
5843
5844 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5845 style reference.  Return true if we found one referring to a DIE for
5846 DECL, otherwise return false.  */
5847
5848 static bool
5849 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5850 unsigned HOST_WIDE_INT *off)
5851 {
5852 dw_die_ref die;
5853
5854 if ((flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
5855 && !decl_die_table)
5856 return false;
5857
5858 if (TREE_CODE (decl) == BLOCK)
5859 die = BLOCK_DIE (decl);
5860 else
5861 die = lookup_decl_die (decl);
5862 if (!die)
5863 return false;
5864
5865 /* During WPA stage and incremental linking we currently use DIEs
5866 to store the decl <-> label + offset map. That's quite inefficient
5867 but it works for now. */
5868 if (flag_wpa
5869 || flag_incremental_link == INCREMENTAL_LINK_LTO)
5870 {
5871 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5872 if (!ref)
5873 {
5874 gcc_assert (die == comp_unit_die ());
5875 return false;
5876 }
5877 *off = ref->die_offset;
5878 *sym = ref->die_id.die_symbol;
5879 return true;
5880 }
5881
5882 /* Similar to get_ref_die_offset_label, but using the "correct"
5883 label. */
5884 *off = die->die_offset;
5885 while (die->die_parent)
5886 die = die->die_parent;
5887 /* For the containing CU DIE we compute a die_symbol in
5888 compute_comp_unit_symbol. */
5889 gcc_assert (die->die_tag == DW_TAG_compile_unit
5890 && die->die_id.die_symbol != NULL);
5891 *sym = die->die_id.die_symbol;
5892 return true;
5893 }
5894
5895 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5896
5897 static void
5898 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5899 const char *symbol, HOST_WIDE_INT offset)
5900 {
5901 /* Create a fake DIE that contains the reference. Don't use
5902 new_die because we don't want to end up in the limbo list. */
5903 dw_die_ref ref = new_die_raw (die->die_tag);
5904 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5905 ref->die_offset = offset;
5906 ref->with_offset = 1;
5907 add_AT_die_ref (die, attr_kind, ref);
5908 }
5909
5910 /* Create a DIE for DECL if required and add a reference to a DIE
5911 at SYMBOL + OFFSET which contains attributes dumped early. */
5912
5913 static void
5914 dwarf2out_register_external_die (tree decl, const char *sym,
5915 unsigned HOST_WIDE_INT off)
5916 {
5917 if (debug_info_level == DINFO_LEVEL_NONE)
5918 return;
5919
5920 if ((flag_wpa
5921 || flag_incremental_link == INCREMENTAL_LINK_LTO) && !decl_die_table)
5922 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5923
5924 dw_die_ref die
5925 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5926 gcc_assert (!die);
5927
5928 tree ctx;
5929 dw_die_ref parent = NULL;
5930 /* Need to look up a DIE for the decl's context - the containing
5931 function or translation unit.  */
5932 if (TREE_CODE (decl) == BLOCK)
5933 {
5934 ctx = BLOCK_SUPERCONTEXT (decl);
5935 /* ??? We do not output DIEs for all scopes thus skip as
5936 many DIEs as needed. */
5937 while (TREE_CODE (ctx) == BLOCK
5938 && !BLOCK_DIE (ctx))
5939 ctx = BLOCK_SUPERCONTEXT (ctx);
5940 }
5941 else
5942 ctx = DECL_CONTEXT (decl);
5943 /* Peel types in the context stack. */
5944 while (ctx && TYPE_P (ctx))
5945 ctx = TYPE_CONTEXT (ctx);
5946 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5947 if (debug_info_level <= DINFO_LEVEL_TERSE)
5948 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5949 ctx = DECL_CONTEXT (ctx);
5950 if (ctx)
5951 {
5952 if (TREE_CODE (ctx) == BLOCK)
5953 parent = BLOCK_DIE (ctx);
5954 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5955 /* Keep the 1:1 association during WPA. */
5956 && !flag_wpa
5957 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5958 /* Otherwise all late annotations go to the main CU which
5959 imports the original CUs. */
5960 parent = comp_unit_die ();
5961 else if (TREE_CODE (ctx) == FUNCTION_DECL
5962 && TREE_CODE (decl) != FUNCTION_DECL
5963 && TREE_CODE (decl) != PARM_DECL
5964 && TREE_CODE (decl) != RESULT_DECL
5965 && TREE_CODE (decl) != BLOCK)
5966 /* Leave function local entities parent determination to when
5967 we process scope vars. */
5968 ;
5969 else
5970 parent = lookup_decl_die (ctx);
5971 }
5972 else
5973 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5974 Handle this case gracefully by globalizing stuff. */
5975 parent = comp_unit_die ();
5976 /* Create a DIE "stub". */
5977 switch (TREE_CODE (decl))
5978 {
5979 case TRANSLATION_UNIT_DECL:
5980 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
5981 {
5982 die = comp_unit_die ();
5983 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5984 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5985 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5986 to create a DIE for the original CUs. */
5987 return;
5988 }
5989 /* Keep the 1:1 association during WPA. */
5990 die = new_die (DW_TAG_compile_unit, NULL, decl);
5991 break;
5992 case NAMESPACE_DECL:
5993 if (is_fortran (decl))
5994 die = new_die (DW_TAG_module, parent, decl);
5995 else
5996 die = new_die (DW_TAG_namespace, parent, decl);
5997 break;
5998 case FUNCTION_DECL:
5999 die = new_die (DW_TAG_subprogram, parent, decl);
6000 break;
6001 case VAR_DECL:
6002 die = new_die (DW_TAG_variable, parent, decl);
6003 break;
6004 case RESULT_DECL:
6005 die = new_die (DW_TAG_variable, parent, decl);
6006 break;
6007 case PARM_DECL:
6008 die = new_die (DW_TAG_formal_parameter, parent, decl);
6009 break;
6010 case CONST_DECL:
6011 die = new_die (DW_TAG_constant, parent, decl);
6012 break;
6013 case LABEL_DECL:
6014 die = new_die (DW_TAG_label, parent, decl);
6015 break;
6016 case BLOCK:
6017 die = new_die (DW_TAG_lexical_block, parent, decl);
6018 break;
6019 default:
6020 gcc_unreachable ();
6021 }
6022 if (TREE_CODE (decl) == BLOCK)
6023 BLOCK_DIE (decl) = die;
6024 else
6025 equate_decl_number_to_die (decl, die);
6026
6027 /* Add a reference to the DIE providing early debug at $sym + off. */
6028 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6029 }
6030
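/* Editorial note: a sketch (all names and numbers hypothetical) of what
   the registration above produces during LTO.  If early debug for a
   function was emitted into a CU whose symbol is EARLY_SYM, with the
   function's DIE at offset 0x123, then the ltrans compilation calls

     dwarf2out_register_external_die (fndecl, EARLY_SYM, 0x123);

   which creates a DW_TAG_subprogram stub under the local CU whose
   DW_AT_abstract_origin is emitted as the external reference
   EARLY_SYM + 0x123 rather than as a reference within this unit.  */
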
6031 /* Returns a hash value for X (which really is a var_loc_list). */
6032
6033 inline hashval_t
6034 decl_loc_hasher::hash (var_loc_list *x)
6035 {
6036 return (hashval_t) x->decl_id;
6037 }
6038
6039 /* Return nonzero if decl_id of var_loc_list X is the same as
6040 UID of decl *Y. */
6041
6042 inline bool
6043 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6044 {
6045 return (x->decl_id == DECL_UID (y));
6046 }
6047
6048 /* Return the var_loc list associated with a given declaration. */
6049
6050 static inline var_loc_list *
6051 lookup_decl_loc (const_tree decl)
6052 {
6053 if (!decl_loc_table)
6054 return NULL;
6055 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6056 }
6057
6058 /* Returns a hash value for X (which really is a cached_dw_loc_list_list). */
6059
6060 inline hashval_t
6061 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6062 {
6063 return (hashval_t) x->decl_id;
6064 }
6065
6066 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6067 UID of decl *Y. */
6068
6069 inline bool
6070 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6071 {
6072 return (x->decl_id == DECL_UID (y));
6073 }
6074
6075 /* Equate a DIE to a particular declaration. */
6076
6077 static void
6078 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6079 {
6080 unsigned int decl_id = DECL_UID (decl);
6081
6082 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6083 decl_die->decl_id = decl_id;
6084 }
6085
6086 /* Return how many bits the PIECE EXPR_LIST covers.  */
6087
6088 static HOST_WIDE_INT
6089 decl_piece_bitsize (rtx piece)
6090 {
6091 int ret = (int) GET_MODE (piece);
6092 if (ret)
6093 return ret;
6094 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6095 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6096 return INTVAL (XEXP (XEXP (piece, 0), 0));
6097 }
6098
6099 /* Return pointer to the location of location note in PIECE EXPR_LIST. */
6100
6101 static rtx *
6102 decl_piece_varloc_ptr (rtx piece)
6103 {
6104 if ((int) GET_MODE (piece))
6105 return &XEXP (piece, 0);
6106 else
6107 return &XEXP (XEXP (piece, 0), 1);
6108 }
6109
6110 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6111 Next is the chain of following piece nodes. */
6112
6113 static rtx_expr_list *
6114 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6115 {
6116 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6117 return alloc_EXPR_LIST (bitsize, loc_note, next);
6118 else
6119 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6120 GEN_INT (bitsize),
6121 loc_note), next);
6122 }
6123
6124 /* Return rtx that should be stored into loc field for
6125 LOC_NOTE and BITPOS/BITSIZE. */
6126
6127 static rtx
6128 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6129 HOST_WIDE_INT bitsize)
6130 {
6131 if (bitsize != -1)
6132 {
6133 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6134 if (bitpos != 0)
6135 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6136 }
6137 return loc_note;
6138 }
6139
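/* Editorial note: a small worked example of the piece encoding used by
   decl_piece_node and construct_piece_list above (sizes chosen for
   illustration; both fit below MAX_MACHINE_MODE on typical targets).
   For a 32-bit piece of a variable starting at bit 16:

     construct_piece_list (note, 16, 32)
       ==> EXPR_LIST [mode 16: padding piece, location NULL_RTX]
             -> EXPR_LIST [mode 32: piece holding NOTE]

   i.e. each EXPR_LIST node reuses its machine-mode field to record the
   piece size in bits; sizes too large for that trick are wrapped in a
   CONCAT (CONST_INT size, location) instead.  */
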
6140 /* This function either modifies location piece list *DEST in place
6141 (if SRC and INNER are NULL), or copies location piece list *SRC
6142 to *DEST while modifying it.  Location BITPOS is modified to
6143 contain LOC_NOTE; any pieces overlapping it are removed (or, when
6144 copying, not copied), and if needed some padding around it is
6145 added.  When modifying in place, DEST should point to the EXPR_LIST
6146 where earlier pieces cover PIECE_BITPOS bits; when copying, SRC
6147 points to the start of the whole list and INNER points to the
6148 EXPR_LIST where earlier pieces cover PIECE_BITPOS bits.  */
6149
6150 static void
6151 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6152 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6153 HOST_WIDE_INT bitsize, rtx loc_note)
6154 {
6155 HOST_WIDE_INT diff;
6156 bool copy = inner != NULL;
6157
6158 if (copy)
6159 {
6160 /* First copy all nodes preceding the current bitpos. */
6161 while (src != inner)
6162 {
6163 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6164 decl_piece_bitsize (*src), NULL_RTX);
6165 dest = &XEXP (*dest, 1);
6166 src = &XEXP (*src, 1);
6167 }
6168 }
6169 /* Add padding if needed. */
6170 if (bitpos != piece_bitpos)
6171 {
6172 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6173 copy ? NULL_RTX : *dest);
6174 dest = &XEXP (*dest, 1);
6175 }
6176 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6177 {
6178 gcc_assert (!copy);
6179 /* A piece with the correct bitpos and bitsize already exists;
6180 just update its location and return.  */
6181 *decl_piece_varloc_ptr (*dest) = loc_note;
6182 return;
6183 }
6184 /* Add the piece that changed. */
6185 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6186 dest = &XEXP (*dest, 1);
6187 /* Skip over pieces that overlap it. */
6188 diff = bitpos - piece_bitpos + bitsize;
6189 if (!copy)
6190 src = dest;
6191 while (diff > 0 && *src)
6192 {
6193 rtx piece = *src;
6194 diff -= decl_piece_bitsize (piece);
6195 if (copy)
6196 src = &XEXP (piece, 1);
6197 else
6198 {
6199 *src = XEXP (piece, 1);
6200 free_EXPR_LIST_node (piece);
6201 }
6202 }
6203 /* Add padding if needed. */
6204 if (diff < 0 && *src)
6205 {
6206 if (!copy)
6207 dest = src;
6208 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6209 dest = &XEXP (*dest, 1);
6210 }
6211 if (!copy)
6212 return;
6213 /* Finally copy all nodes following it. */
6214 while (*src)
6215 {
6216 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6217 decl_piece_bitsize (*src), NULL_RTX);
6218 dest = &XEXP (*dest, 1);
6219 src = &XEXP (*src, 1);
6220 }
6221 }
6222
6223 /* Add a variable location node to the linked list for DECL. */
6224
6225 static struct var_loc_node *
6226 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6227 {
6228 unsigned int decl_id;
6229 var_loc_list *temp;
6230 struct var_loc_node *loc = NULL;
6231 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6232
6233 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6234 {
6235 tree realdecl = DECL_DEBUG_EXPR (decl);
6236 if (handled_component_p (realdecl)
6237 || (TREE_CODE (realdecl) == MEM_REF
6238 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6239 {
6240 bool reverse;
6241 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6242 &bitsize, &reverse);
6243 if (!innerdecl
6244 || !DECL_P (innerdecl)
6245 || DECL_IGNORED_P (innerdecl)
6246 || TREE_STATIC (innerdecl)
6247 || bitsize == 0
6248 || bitpos + bitsize > 256)
6249 return NULL;
6250 decl = innerdecl;
6251 }
6252 }
6253
6254 decl_id = DECL_UID (decl);
6255 var_loc_list **slot
6256 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6257 if (*slot == NULL)
6258 {
6259 temp = ggc_cleared_alloc<var_loc_list> ();
6260 temp->decl_id = decl_id;
6261 *slot = temp;
6262 }
6263 else
6264 temp = *slot;
6265
6266 /* For PARM_DECLs try to keep around the original incoming value,
6267 even if that means we'll emit a zero-range .debug_loc entry. */
6268 if (temp->last
6269 && temp->first == temp->last
6270 && TREE_CODE (decl) == PARM_DECL
6271 && NOTE_P (temp->first->loc)
6272 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6273 && DECL_INCOMING_RTL (decl)
6274 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6275 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6276 == GET_CODE (DECL_INCOMING_RTL (decl))
6277 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6278 && (bitsize != -1
6279 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6280 NOTE_VAR_LOCATION_LOC (loc_note))
6281 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6282 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6283 {
6284 loc = ggc_cleared_alloc<var_loc_node> ();
6285 temp->first->next = loc;
6286 temp->last = loc;
6287 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6288 }
6289 else if (temp->last)
6290 {
6291 struct var_loc_node *last = temp->last, *unused = NULL;
6292 rtx *piece_loc = NULL, last_loc_note;
6293 HOST_WIDE_INT piece_bitpos = 0;
6294 if (last->next)
6295 {
6296 last = last->next;
6297 gcc_assert (last->next == NULL);
6298 }
6299 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6300 {
6301 piece_loc = &last->loc;
6302 do
6303 {
6304 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6305 if (piece_bitpos + cur_bitsize > bitpos)
6306 break;
6307 piece_bitpos += cur_bitsize;
6308 piece_loc = &XEXP (*piece_loc, 1);
6309 }
6310 while (*piece_loc);
6311 }
6312 /* TEMP->LAST here points either to the last-but-one or to the
6313 last element of the chained list; LAST points to the last
6314 element.  */
6315 if (label && strcmp (last->label, label) == 0 && last->view == view)
6316 {
6317 /* For SRA-optimized variables, if there weren't any real
6318 insns since the last note, just modify the last node.  */
6319 if (piece_loc != NULL)
6320 {
6321 adjust_piece_list (piece_loc, NULL, NULL,
6322 bitpos, piece_bitpos, bitsize, loc_note);
6323 return NULL;
6324 }
6325 /* If the last note doesn't cover any instructions, remove it. */
6326 if (temp->last != last)
6327 {
6328 temp->last->next = NULL;
6329 unused = last;
6330 last = temp->last;
6331 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6332 }
6333 else
6334 {
6335 gcc_assert (temp->first == temp->last
6336 || (temp->first->next == temp->last
6337 && TREE_CODE (decl) == PARM_DECL));
6338 memset (temp->last, '\0', sizeof (*temp->last));
6339 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6340 return temp->last;
6341 }
6342 }
6343 if (bitsize == -1 && NOTE_P (last->loc))
6344 last_loc_note = last->loc;
6345 else if (piece_loc != NULL
6346 && *piece_loc != NULL_RTX
6347 && piece_bitpos == bitpos
6348 && decl_piece_bitsize (*piece_loc) == bitsize)
6349 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6350 else
6351 last_loc_note = NULL_RTX;
6352 /* If the current location is the same as the end of the list,
6353 and either both or neither of the locations is uninitialized,
6354 we have nothing to do. */
6355 if (last_loc_note == NULL_RTX
6356 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6357 NOTE_VAR_LOCATION_LOC (loc_note)))
6358 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6359 != NOTE_VAR_LOCATION_STATUS (loc_note))
6360 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6361 == VAR_INIT_STATUS_UNINITIALIZED)
6362 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6363 == VAR_INIT_STATUS_UNINITIALIZED))))
6364 {
6365 /* Add LOC to the end of list and update LAST. If the last
6366 element of the list has been removed above, reuse its
6367 memory for the new node, otherwise allocate a new one. */
6368 if (unused)
6369 {
6370 loc = unused;
6371 memset (loc, '\0', sizeof (*loc));
6372 }
6373 else
6374 loc = ggc_cleared_alloc<var_loc_node> ();
6375 if (bitsize == -1 || piece_loc == NULL)
6376 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6377 else
6378 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6379 bitpos, piece_bitpos, bitsize, loc_note);
6380 last->next = loc;
6381 /* Ensure TEMP->LAST will point either to the new last but one
6382 element of the chain, or to the last element in it. */
6383 if (last != temp->last)
6384 temp->last = last;
6385 }
6386 else if (unused)
6387 ggc_free (unused);
6388 }
6389 else
6390 {
6391 loc = ggc_cleared_alloc<var_loc_node> ();
6392 temp->first = loc;
6393 temp->last = loc;
6394 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6395 }
6396 return loc;
6397 }
6398 \f
6399 /* Keep track of the number of spaces used to indent the
6400 output of the debugging routines that print the structure of
6401 the DIE internal representation. */
6402 static int print_indent;
6403
6404 /* Indent the line the number of spaces given by print_indent. */
6405
6406 static inline void
6407 print_spaces (FILE *outfile)
6408 {
6409 fprintf (outfile, "%*s", print_indent, "");
6410 }
6411
6412 /* Print a type signature in hex. */
6413
6414 static inline void
6415 print_signature (FILE *outfile, char *sig)
6416 {
6417 int i;
6418
6419 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6420 fprintf (outfile, "%02x", sig[i] & 0xff);
6421 }
6422
6423 static inline void
6424 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6425 {
6426 if (discr_value->pos)
6427 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6428 else
6429 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6430 }
6431
6432 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6433
6434 /* Print the value associated with the VAL DWARF value node to OUTFILE.  If
6435 RECURSE, output location descriptor operations.  */
6436
6437 static void
6438 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6439 {
6440 switch (val->val_class)
6441 {
6442 case dw_val_class_addr:
6443 fprintf (outfile, "address");
6444 break;
6445 case dw_val_class_offset:
6446 fprintf (outfile, "offset");
6447 break;
6448 case dw_val_class_loc:
6449 fprintf (outfile, "location descriptor");
6450 if (val->v.val_loc == NULL)
6451 fprintf (outfile, " -> <null>\n");
6452 else if (recurse)
6453 {
6454 fprintf (outfile, ":\n");
6455 print_indent += 4;
6456 print_loc_descr (val->v.val_loc, outfile);
6457 print_indent -= 4;
6458 }
6459 else
6460 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6461 break;
6462 case dw_val_class_loc_list:
6463 fprintf (outfile, "location list -> label:%s",
6464 val->v.val_loc_list->ll_symbol);
6465 break;
6466 case dw_val_class_view_list:
6467 val = view_list_to_loc_list_val_node (val);
6468 fprintf (outfile, "location list with views -> labels:%s and %s",
6469 val->v.val_loc_list->ll_symbol,
6470 val->v.val_loc_list->vl_symbol);
6471 break;
6472 case dw_val_class_range_list:
6473 fprintf (outfile, "range list");
6474 break;
6475 case dw_val_class_const:
6476 case dw_val_class_const_implicit:
6477 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6478 break;
6479 case dw_val_class_unsigned_const:
6480 case dw_val_class_unsigned_const_implicit:
6481 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6482 break;
6483 case dw_val_class_const_double:
6484 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6485 HOST_WIDE_INT_PRINT_UNSIGNED")",
6486 val->v.val_double.high,
6487 val->v.val_double.low);
6488 break;
6489 case dw_val_class_wide_int:
6490 {
6491 int i = val->v.val_wide->get_len ();
6492 fprintf (outfile, "constant (");
6493 gcc_assert (i > 0);
6494 if (val->v.val_wide->elt (i - 1) == 0)
6495 fprintf (outfile, "0x");
6496 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6497 val->v.val_wide->elt (--i));
6498 while (--i >= 0)
6499 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6500 val->v.val_wide->elt (i));
6501 fprintf (outfile, ")");
6502 break;
6503 }
6504 case dw_val_class_vec:
6505 fprintf (outfile, "floating-point or vector constant");
6506 break;
6507 case dw_val_class_flag:
6508 fprintf (outfile, "%u", val->v.val_flag);
6509 break;
6510 case dw_val_class_die_ref:
6511 if (val->v.val_die_ref.die != NULL)
6512 {
6513 dw_die_ref die = val->v.val_die_ref.die;
6514
6515 if (die->comdat_type_p)
6516 {
6517 fprintf (outfile, "die -> signature: ");
6518 print_signature (outfile,
6519 die->die_id.die_type_node->signature);
6520 }
6521 else if (die->die_id.die_symbol)
6522 {
6523 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6524 if (die->with_offset)
6525 fprintf (outfile, " + %ld", die->die_offset);
6526 }
6527 else
6528 fprintf (outfile, "die -> %ld", die->die_offset);
6529 fprintf (outfile, " (%p)", (void *) die);
6530 }
6531 else
6532 fprintf (outfile, "die -> <null>");
6533 break;
6534 case dw_val_class_vms_delta:
6535 fprintf (outfile, "delta: @slotcount(%s-%s)",
6536 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6537 break;
6538 case dw_val_class_symview:
6539 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6540 break;
6541 case dw_val_class_lbl_id:
6542 case dw_val_class_lineptr:
6543 case dw_val_class_macptr:
6544 case dw_val_class_loclistsptr:
6545 case dw_val_class_high_pc:
6546 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6547 break;
6548 case dw_val_class_str:
6549 if (val->v.val_str->str != NULL)
6550 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6551 else
6552 fprintf (outfile, "<null>");
6553 break;
6554 case dw_val_class_file:
6555 case dw_val_class_file_implicit:
6556 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6557 val->v.val_file->emitted_number);
6558 break;
6559 case dw_val_class_data8:
6560 {
6561 int i;
6562
6563 for (i = 0; i < 8; i++)
6564 fprintf (outfile, "%02x", val->v.val_data8[i]);
6565 break;
6566 }
6567 case dw_val_class_discr_value:
6568 print_discr_value (outfile, &val->v.val_discr_value);
6569 break;
6570 case dw_val_class_discr_list:
6571 for (dw_discr_list_ref node = val->v.val_discr_list;
6572 node != NULL;
6573 node = node->dw_discr_next)
6574 {
6575 if (node->dw_discr_range)
6576 {
6577 print_discr_value (outfile, &node->dw_discr_lower_bound);
6578 fprintf (outfile, " .. ");
6579 print_discr_value (outfile, &node->dw_discr_upper_bound);
6580 }
6581 else
6582 print_discr_value (outfile, &node->dw_discr_lower_bound);
6583
6584 if (node->dw_discr_next != NULL)
6585 fprintf (outfile, " | ");
6586 }
6587 default:
6588 break;
6589 }
6590 }
6591
6592 /* Likewise, for a DIE attribute. */
6593
6594 static void
6595 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6596 {
6597 print_dw_val (&a->dw_attr_val, recurse, outfile);
6598 }
6599
6600
6601 /* Print the list of operations in the LOC location description to OUTFILE.  This
6602 routine is a debugging aid only. */
6603
6604 static void
6605 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6606 {
6607 dw_loc_descr_ref l = loc;
6608
6609 if (loc == NULL)
6610 {
6611 print_spaces (outfile);
6612 fprintf (outfile, "<null>\n");
6613 return;
6614 }
6615
6616 for (l = loc; l != NULL; l = l->dw_loc_next)
6617 {
6618 print_spaces (outfile);
6619 fprintf (outfile, "(%p) %s",
6620 (void *) l,
6621 dwarf_stack_op_name (l->dw_loc_opc));
6622 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6623 {
6624 fprintf (outfile, " ");
6625 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6626 }
6627 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6628 {
6629 fprintf (outfile, ", ");
6630 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6631 }
6632 fprintf (outfile, "\n");
6633 }
6634 }
6635
6636 /* Print the information associated with a given DIE, and its children.
6637 This routine is a debugging aid only. */
6638
6639 static void
6640 print_die (dw_die_ref die, FILE *outfile)
6641 {
6642 dw_attr_node *a;
6643 dw_die_ref c;
6644 unsigned ix;
6645
6646 print_spaces (outfile);
6647 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6648 die->die_offset, dwarf_tag_name (die->die_tag),
6649 (void*) die);
6650 print_spaces (outfile);
6651 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6652 fprintf (outfile, " offset: %ld", die->die_offset);
6653 fprintf (outfile, " mark: %d\n", die->die_mark);
6654
6655 if (die->comdat_type_p)
6656 {
6657 print_spaces (outfile);
6658 fprintf (outfile, " signature: ");
6659 print_signature (outfile, die->die_id.die_type_node->signature);
6660 fprintf (outfile, "\n");
6661 }
6662
6663 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6664 {
6665 print_spaces (outfile);
6666 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6667
6668 print_attribute (a, true, outfile);
6669 fprintf (outfile, "\n");
6670 }
6671
6672 if (die->die_child != NULL)
6673 {
6674 print_indent += 4;
6675 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6676 print_indent -= 4;
6677 }
6678 if (print_indent == 0)
6679 fprintf (outfile, "\n");
6680 }
6681
6682 /* Print the list of operations in the LOC location description. */
6683
6684 DEBUG_FUNCTION void
6685 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6686 {
6687 print_loc_descr (loc, stderr);
6688 }
6689
6690 /* Print the information collected for a given DIE. */
6691
6692 DEBUG_FUNCTION void
6693 debug_dwarf_die (dw_die_ref die)
6694 {
6695 print_die (die, stderr);
6696 }
6697
6698 DEBUG_FUNCTION void
6699 debug (die_struct &ref)
6700 {
6701 print_die (&ref, stderr);
6702 }
6703
6704 DEBUG_FUNCTION void
6705 debug (die_struct *ptr)
6706 {
6707 if (ptr)
6708 debug (*ptr);
6709 else
6710 fprintf (stderr, "<nil>\n");
6711 }
6712
6713
6714 /* Print all DWARF information collected for the compilation unit.
6715 This routine is a debugging aid only. */
6716
6717 DEBUG_FUNCTION void
6718 debug_dwarf (void)
6719 {
6720 print_indent = 0;
6721 print_die (comp_unit_die (), stderr);
6722 }
6723
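/* Editorial note: the DEBUG_FUNCTIONs above are intended to be called
   by hand from a debugger while cc1/cc1plus is stopped, e.g.

     (gdb) call debug_dwarf_die (die)
     (gdb) call debug_dwarf ()

   They only print to stderr (debug_dwarf also resets the static
   indentation counter before printing).  */
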
6724 /* Verify the DIE tree structure. */
6725
6726 DEBUG_FUNCTION void
6727 verify_die (dw_die_ref die)
6728 {
6729 gcc_assert (!die->die_mark);
6730 if (die->die_parent == NULL
6731 && die->die_sib == NULL)
6732 return;
6733 /* Verify the die_sib list is cyclic. */
6734 dw_die_ref x = die;
6735 do
6736 {
6737 x->die_mark = 1;
6738 x = x->die_sib;
6739 }
6740 while (x && !x->die_mark);
6741 gcc_assert (x == die);
6742 x = die;
6743 do
6744 {
6745 /* Verify all dies have the same parent. */
6746 gcc_assert (x->die_parent == die->die_parent);
6747 if (x->die_child)
6748 {
6749 /* Verify the child has the proper parent and recurse. */
6750 gcc_assert (x->die_child->die_parent == x);
6751 verify_die (x->die_child);
6752 }
6753 x->die_mark = 0;
6754 x = x->die_sib;
6755 }
6756 while (x && x->die_mark);
6757 }
6758
6759 /* Sanity checks on DIEs. */
6760
6761 static void
6762 check_die (dw_die_ref die)
6763 {
6764 unsigned ix;
6765 dw_attr_node *a;
6766 bool inline_found = false;
6767 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6768 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6769 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6770 {
6771 switch (a->dw_attr)
6772 {
6773 case DW_AT_inline:
6774 if (a->dw_attr_val.v.val_unsigned)
6775 inline_found = true;
6776 break;
6777 case DW_AT_location:
6778 ++n_location;
6779 break;
6780 case DW_AT_low_pc:
6781 ++n_low_pc;
6782 break;
6783 case DW_AT_high_pc:
6784 ++n_high_pc;
6785 break;
6786 case DW_AT_artificial:
6787 ++n_artificial;
6788 break;
6789 case DW_AT_decl_column:
6790 ++n_decl_column;
6791 break;
6792 case DW_AT_decl_line:
6793 ++n_decl_line;
6794 break;
6795 case DW_AT_decl_file:
6796 ++n_decl_file;
6797 break;
6798 default:
6799 break;
6800 }
6801 }
6802 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6803 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6804 {
6805 fprintf (stderr, "Duplicate attributes in DIE:\n");
6806 debug_dwarf_die (die);
6807 gcc_unreachable ();
6808 }
6809 if (inline_found)
6810 {
6811 /* A debugging information entry that is a member of an abstract
6812 instance tree [that has DW_AT_inline] should not contain any
6813 attributes which describe aspects of the subroutine which vary
6814 between distinct inlined expansions or distinct out-of-line
6815 expansions. */
6816 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6817 gcc_assert (a->dw_attr != DW_AT_low_pc
6818 && a->dw_attr != DW_AT_high_pc
6819 && a->dw_attr != DW_AT_location
6820 && a->dw_attr != DW_AT_frame_base
6821 && a->dw_attr != DW_AT_call_all_calls
6822 && a->dw_attr != DW_AT_GNU_all_call_sites);
6823 }
6824 }
6825 \f
6826 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6827 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6828 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6829
6830 /* Calculate the checksum of a location expression. */
6831
6832 static inline void
6833 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6834 {
6835 int tem;
6836 inchash::hash hstate;
6837 hashval_t hash;
6838
6839 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6840 CHECKSUM (tem);
6841 hash_loc_operands (loc, hstate);
6842 hash = hstate.end();
6843 CHECKSUM (hash);
6844 }
6845
6846 /* Calculate the checksum of an attribute. */
6847
6848 static void
6849 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6850 {
6851 dw_loc_descr_ref loc;
6852 rtx r;
6853
6854 CHECKSUM (at->dw_attr);
6855
6856 /* We don't care that this was compiled with a different compiler
6857 snapshot; if the output is the same, that's what matters. */
6858 if (at->dw_attr == DW_AT_producer)
6859 return;
6860
6861 switch (AT_class (at))
6862 {
6863 case dw_val_class_const:
6864 case dw_val_class_const_implicit:
6865 CHECKSUM (at->dw_attr_val.v.val_int);
6866 break;
6867 case dw_val_class_unsigned_const:
6868 case dw_val_class_unsigned_const_implicit:
6869 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6870 break;
6871 case dw_val_class_const_double:
6872 CHECKSUM (at->dw_attr_val.v.val_double);
6873 break;
6874 case dw_val_class_wide_int:
6875 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6876 get_full_len (*at->dw_attr_val.v.val_wide)
6877 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6878 break;
6879 case dw_val_class_vec:
6880 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6881 (at->dw_attr_val.v.val_vec.length
6882 * at->dw_attr_val.v.val_vec.elt_size));
6883 break;
6884 case dw_val_class_flag:
6885 CHECKSUM (at->dw_attr_val.v.val_flag);
6886 break;
6887 case dw_val_class_str:
6888 CHECKSUM_STRING (AT_string (at));
6889 break;
6890
6891 case dw_val_class_addr:
6892 r = AT_addr (at);
6893 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6894 CHECKSUM_STRING (XSTR (r, 0));
6895 break;
6896
6897 case dw_val_class_offset:
6898 CHECKSUM (at->dw_attr_val.v.val_offset);
6899 break;
6900
6901 case dw_val_class_loc:
6902 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6903 loc_checksum (loc, ctx);
6904 break;
6905
6906 case dw_val_class_die_ref:
6907 die_checksum (AT_ref (at), ctx, mark);
6908 break;
6909
6910 case dw_val_class_fde_ref:
6911 case dw_val_class_vms_delta:
6912 case dw_val_class_symview:
6913 case dw_val_class_lbl_id:
6914 case dw_val_class_lineptr:
6915 case dw_val_class_macptr:
6916 case dw_val_class_loclistsptr:
6917 case dw_val_class_high_pc:
6918 break;
6919
6920 case dw_val_class_file:
6921 case dw_val_class_file_implicit:
6922 CHECKSUM_STRING (AT_file (at)->filename);
6923 break;
6924
6925 case dw_val_class_data8:
6926 CHECKSUM (at->dw_attr_val.v.val_data8);
6927 break;
6928
6929 default:
6930 break;
6931 }
6932 }
6933
6934 /* Calculate the checksum of a DIE. */
6935
6936 static void
6937 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6938 {
6939 dw_die_ref c;
6940 dw_attr_node *a;
6941 unsigned ix;
6942
6943 /* To avoid infinite recursion. */
6944 if (die->die_mark)
6945 {
6946 CHECKSUM (die->die_mark);
6947 return;
6948 }
6949 die->die_mark = ++(*mark);
6950
6951 CHECKSUM (die->die_tag);
6952
6953 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6954 attr_checksum (a, ctx, mark);
6955
6956 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6957 }
6958
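/* Editorial note: DIE graphs can be cyclic (e.g. through DW_AT_type
   references), so die_checksum records a visit order in die_mark and,
   on revisiting a DIE, checksums that number instead of recursing
   forever.  The caller is expected to clear the marks again afterwards
   (see unmark_all_dies later in this file).  */
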
6959 #undef CHECKSUM
6960 #undef CHECKSUM_BLOCK
6961 #undef CHECKSUM_STRING
6962
6963 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6964 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6965 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6966 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6967 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6968 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6969 #define CHECKSUM_ATTR(FOO) \
6970 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6971
6972 /* Calculate the checksum of a number in signed LEB128 format. */
6973
6974 static void
6975 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6976 {
6977 unsigned char byte;
6978 bool more;
6979
6980 while (1)
6981 {
6982 byte = (value & 0x7f);
6983 value >>= 7;
6984 more = !((value == 0 && (byte & 0x40) == 0)
6985 || (value == -1 && (byte & 0x40) != 0));
6986 if (more)
6987 byte |= 0x80;
6988 CHECKSUM (byte);
6989 if (!more)
6990 break;
6991 }
6992 }
6993
6994 /* Calculate the checksum of a number in unsigned LEB128 format. */
6995
6996 static void
6997 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6998 {
6999 while (1)
7000 {
7001 unsigned char byte = (value & 0x7f);
7002 value >>= 7;
7003 if (value != 0)
7004 /* More bytes to follow. */
7005 byte |= 0x80;
7006 CHECKSUM (byte);
7007 if (value == 0)
7008 break;
7009 }
7010 }
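
/* Worked example (illustration only, not used elsewhere in this file):
   the two routines above feed the checksum exactly the byte sequences a
   LEB128 encoder would produce.  Writing the bytes into a buffer instead
   of an md5_ctx makes the encoding easy to inspect; e.g. 624485 encodes
   as 0xe5 0x8e 0x26 in unsigned LEB128 and -123456 encodes as
   0xc0 0xbb 0x78 in signed LEB128.  The hypothetical helpers below are a
   minimal sketch of that encoding.  */
#if 0 /* Example only.  */
static size_t
sketch_encode_uleb128 (unsigned HOST_WIDE_INT value, unsigned char *buf)
{
  size_t n = 0;
  while (1)
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value != 0)
	/* More bytes to follow.  */
	byte |= 0x80;
      buf[n++] = byte;
      if (value == 0)
	return n;
    }
}

static size_t
sketch_encode_sleb128 (HOST_WIDE_INT value, unsigned char *buf)
{
  size_t n = 0;
  bool more;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      /* Stop once the remaining bits are just sign extension.  */
      more = !((value == 0 && (byte & 0x40) == 0)
	       || (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      buf[n++] = byte;
    }
  while (more);
  return n;
}
#endif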
7011
7012 /* Checksum the context of the DIE. This adds the names of any
7013 surrounding namespaces or structures to the checksum. */
7014
7015 static void
7016 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7017 {
7018 const char *name;
7019 dw_die_ref spec;
7020 int tag = die->die_tag;
7021
7022 if (tag != DW_TAG_namespace
7023 && tag != DW_TAG_structure_type
7024 && tag != DW_TAG_class_type)
7025 return;
7026
7027 name = get_AT_string (die, DW_AT_name);
7028
7029 spec = get_AT_ref (die, DW_AT_specification);
7030 if (spec != NULL)
7031 die = spec;
7032
7033 if (die->die_parent != NULL)
7034 checksum_die_context (die->die_parent, ctx);
7035
7036 CHECKSUM_ULEB128 ('C');
7037 CHECKSUM_ULEB128 (tag);
7038 if (name != NULL)
7039 CHECKSUM_STRING (name);
7040 }
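
/* Worked example (illustration only): for C++ source such as
     namespace N { struct S { struct T { int i; }; }; }
   the context contribution to T's signature is, conceptually,
     'C' DW_TAG_namespace      "N"
     'C' DW_TAG_structure_type "S"
   where each marker and tag is checksummed as a ULEB128 value and each
   name is checksummed including its trailing NUL.  The exact DIEs depend
   on what the front end emits.  */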
7041
7042 /* Calculate the checksum of a location expression. */
7043
7044 static inline void
7045 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7046 {
7047 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7048 were emitted as a DW_FORM_sdata instead of a location expression. */
7049 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7050 {
7051 CHECKSUM_ULEB128 (DW_FORM_sdata);
7052 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7053 return;
7054 }
7055
7056 /* Otherwise, just checksum the raw location expression. */
7057 while (loc != NULL)
7058 {
7059 inchash::hash hstate;
7060 hashval_t hash;
7061
7062 CHECKSUM_ULEB128 (loc->dtprel);
7063 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7064 hash_loc_operands (loc, hstate);
7065 hash = hstate.end ();
7066 CHECKSUM (hash);
7067 loc = loc->dw_loc_next;
7068 }
7069 }
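
/* Worked example (illustration only): a member whose
   DW_AT_data_member_location is the single operation DW_OP_plus_uconst 8
   is checksummed above exactly as a DW_FORM_sdata constant 8 would be,
   so the resulting type signature is the same whether the attribute is
   emitted as a bare constant or as that one-element location
   expression.  */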
7070
7071 /* Calculate the checksum of an attribute. */
7072
7073 static void
7074 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7075 struct md5_ctx *ctx, int *mark)
7076 {
7077 dw_loc_descr_ref loc;
7078 rtx r;
7079
7080 if (AT_class (at) == dw_val_class_die_ref)
7081 {
7082 dw_die_ref target_die = AT_ref (at);
7083
7084 /* For pointer and reference types, we checksum only the (qualified)
7085 name of the target type (if there is a name). For friend entries,
7086 we checksum only the (qualified) name of the target type or function.
7087 This allows the checksum to remain the same whether the target type
7088 is complete or not. */
7089 if ((at->dw_attr == DW_AT_type
7090 && (tag == DW_TAG_pointer_type
7091 || tag == DW_TAG_reference_type
7092 || tag == DW_TAG_rvalue_reference_type
7093 || tag == DW_TAG_ptr_to_member_type))
7094 || (at->dw_attr == DW_AT_friend
7095 && tag == DW_TAG_friend))
7096 {
7097 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7098
7099 if (name_attr != NULL)
7100 {
7101 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7102
7103 if (decl == NULL)
7104 decl = target_die;
7105 CHECKSUM_ULEB128 ('N');
7106 CHECKSUM_ULEB128 (at->dw_attr);
7107 if (decl->die_parent != NULL)
7108 checksum_die_context (decl->die_parent, ctx);
7109 CHECKSUM_ULEB128 ('E');
7110 CHECKSUM_STRING (AT_string (name_attr));
7111 return;
7112 }
7113 }
7114
7115 /* For all other references to another DIE, we check to see if the
7116 target DIE has already been visited. If it has, we emit a
7117 backward reference; if not, we descend recursively. */
7118 if (target_die->die_mark > 0)
7119 {
7120 CHECKSUM_ULEB128 ('R');
7121 CHECKSUM_ULEB128 (at->dw_attr);
7122 CHECKSUM_ULEB128 (target_die->die_mark);
7123 }
7124 else
7125 {
7126 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7127
7128 if (decl == NULL)
7129 decl = target_die;
7130 target_die->die_mark = ++(*mark);
7131 CHECKSUM_ULEB128 ('T');
7132 CHECKSUM_ULEB128 (at->dw_attr);
7133 if (decl->die_parent != NULL)
7134 checksum_die_context (decl->die_parent, ctx);
7135 die_checksum_ordered (target_die, ctx, mark);
7136 }
7137 return;
7138 }
7139
7140 CHECKSUM_ULEB128 ('A');
7141 CHECKSUM_ULEB128 (at->dw_attr);
7142
7143 switch (AT_class (at))
7144 {
7145 case dw_val_class_const:
7146 case dw_val_class_const_implicit:
7147 CHECKSUM_ULEB128 (DW_FORM_sdata);
7148 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7149 break;
7150
7151 case dw_val_class_unsigned_const:
7152 case dw_val_class_unsigned_const_implicit:
7153 CHECKSUM_ULEB128 (DW_FORM_sdata);
7154 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7155 break;
7156
7157 case dw_val_class_const_double:
7158 CHECKSUM_ULEB128 (DW_FORM_block);
7159 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7160 CHECKSUM (at->dw_attr_val.v.val_double);
7161 break;
7162
7163 case dw_val_class_wide_int:
7164 CHECKSUM_ULEB128 (DW_FORM_block);
7165 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7166 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7167 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7168 get_full_len (*at->dw_attr_val.v.val_wide)
7169 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7170 break;
7171
7172 case dw_val_class_vec:
7173 CHECKSUM_ULEB128 (DW_FORM_block);
7174 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7175 * at->dw_attr_val.v.val_vec.elt_size);
7176 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7177 (at->dw_attr_val.v.val_vec.length
7178 * at->dw_attr_val.v.val_vec.elt_size));
7179 break;
7180
7181 case dw_val_class_flag:
7182 CHECKSUM_ULEB128 (DW_FORM_flag);
7183 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7184 break;
7185
7186 case dw_val_class_str:
7187 CHECKSUM_ULEB128 (DW_FORM_string);
7188 CHECKSUM_STRING (AT_string (at));
7189 break;
7190
7191 case dw_val_class_addr:
7192 r = AT_addr (at);
7193 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7194 CHECKSUM_ULEB128 (DW_FORM_string);
7195 CHECKSUM_STRING (XSTR (r, 0));
7196 break;
7197
7198 case dw_val_class_offset:
7199 CHECKSUM_ULEB128 (DW_FORM_sdata);
7200 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7201 break;
7202
7203 case dw_val_class_loc:
7204 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7205 loc_checksum_ordered (loc, ctx);
7206 break;
7207
7208 case dw_val_class_fde_ref:
7209 case dw_val_class_symview:
7210 case dw_val_class_lbl_id:
7211 case dw_val_class_lineptr:
7212 case dw_val_class_macptr:
7213 case dw_val_class_loclistsptr:
7214 case dw_val_class_high_pc:
7215 break;
7216
7217 case dw_val_class_file:
7218 case dw_val_class_file_implicit:
7219 CHECKSUM_ULEB128 (DW_FORM_string);
7220 CHECKSUM_STRING (AT_file (at)->filename);
7221 break;
7222
7223 case dw_val_class_data8:
7224 CHECKSUM (at->dw_attr_val.v.val_data8);
7225 break;
7226
7227 default:
7228 break;
7229 }
7230 }
7231
7232 struct checksum_attributes
7233 {
7234 dw_attr_node *at_name;
7235 dw_attr_node *at_type;
7236 dw_attr_node *at_friend;
7237 dw_attr_node *at_accessibility;
7238 dw_attr_node *at_address_class;
7239 dw_attr_node *at_alignment;
7240 dw_attr_node *at_allocated;
7241 dw_attr_node *at_artificial;
7242 dw_attr_node *at_associated;
7243 dw_attr_node *at_binary_scale;
7244 dw_attr_node *at_bit_offset;
7245 dw_attr_node *at_bit_size;
7246 dw_attr_node *at_bit_stride;
7247 dw_attr_node *at_byte_size;
7248 dw_attr_node *at_byte_stride;
7249 dw_attr_node *at_const_value;
7250 dw_attr_node *at_containing_type;
7251 dw_attr_node *at_count;
7252 dw_attr_node *at_data_location;
7253 dw_attr_node *at_data_member_location;
7254 dw_attr_node *at_decimal_scale;
7255 dw_attr_node *at_decimal_sign;
7256 dw_attr_node *at_default_value;
7257 dw_attr_node *at_digit_count;
7258 dw_attr_node *at_discr;
7259 dw_attr_node *at_discr_list;
7260 dw_attr_node *at_discr_value;
7261 dw_attr_node *at_encoding;
7262 dw_attr_node *at_endianity;
7263 dw_attr_node *at_explicit;
7264 dw_attr_node *at_is_optional;
7265 dw_attr_node *at_location;
7266 dw_attr_node *at_lower_bound;
7267 dw_attr_node *at_mutable;
7268 dw_attr_node *at_ordering;
7269 dw_attr_node *at_picture_string;
7270 dw_attr_node *at_prototyped;
7271 dw_attr_node *at_small;
7272 dw_attr_node *at_segment;
7273 dw_attr_node *at_string_length;
7274 dw_attr_node *at_string_length_bit_size;
7275 dw_attr_node *at_string_length_byte_size;
7276 dw_attr_node *at_threads_scaled;
7277 dw_attr_node *at_upper_bound;
7278 dw_attr_node *at_use_location;
7279 dw_attr_node *at_use_UTF8;
7280 dw_attr_node *at_variable_parameter;
7281 dw_attr_node *at_virtuality;
7282 dw_attr_node *at_visibility;
7283 dw_attr_node *at_vtable_elem_location;
7284 };
7285
7286 /* Collect the attributes that we will want to use for the checksum. */
7287
7288 static void
7289 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7290 {
7291 dw_attr_node *a;
7292 unsigned ix;
7293
7294 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7295 {
7296 switch (a->dw_attr)
7297 {
7298 case DW_AT_name:
7299 attrs->at_name = a;
7300 break;
7301 case DW_AT_type:
7302 attrs->at_type = a;
7303 break;
7304 case DW_AT_friend:
7305 attrs->at_friend = a;
7306 break;
7307 case DW_AT_accessibility:
7308 attrs->at_accessibility = a;
7309 break;
7310 case DW_AT_address_class:
7311 attrs->at_address_class = a;
7312 break;
7313 case DW_AT_alignment:
7314 attrs->at_alignment = a;
7315 break;
7316 case DW_AT_allocated:
7317 attrs->at_allocated = a;
7318 break;
7319 case DW_AT_artificial:
7320 attrs->at_artificial = a;
7321 break;
7322 case DW_AT_associated:
7323 attrs->at_associated = a;
7324 break;
7325 case DW_AT_binary_scale:
7326 attrs->at_binary_scale = a;
7327 break;
7328 case DW_AT_bit_offset:
7329 attrs->at_bit_offset = a;
7330 break;
7331 case DW_AT_bit_size:
7332 attrs->at_bit_size = a;
7333 break;
7334 case DW_AT_bit_stride:
7335 attrs->at_bit_stride = a;
7336 break;
7337 case DW_AT_byte_size:
7338 attrs->at_byte_size = a;
7339 break;
7340 case DW_AT_byte_stride:
7341 attrs->at_byte_stride = a;
7342 break;
7343 case DW_AT_const_value:
7344 attrs->at_const_value = a;
7345 break;
7346 case DW_AT_containing_type:
7347 attrs->at_containing_type = a;
7348 break;
7349 case DW_AT_count:
7350 attrs->at_count = a;
7351 break;
7352 case DW_AT_data_location:
7353 attrs->at_data_location = a;
7354 break;
7355 case DW_AT_data_member_location:
7356 attrs->at_data_member_location = a;
7357 break;
7358 case DW_AT_decimal_scale:
7359 attrs->at_decimal_scale = a;
7360 break;
7361 case DW_AT_decimal_sign:
7362 attrs->at_decimal_sign = a;
7363 break;
7364 case DW_AT_default_value:
7365 attrs->at_default_value = a;
7366 break;
7367 case DW_AT_digit_count:
7368 attrs->at_digit_count = a;
7369 break;
7370 case DW_AT_discr:
7371 attrs->at_discr = a;
7372 break;
7373 case DW_AT_discr_list:
7374 attrs->at_discr_list = a;
7375 break;
7376 case DW_AT_discr_value:
7377 attrs->at_discr_value = a;
7378 break;
7379 case DW_AT_encoding:
7380 attrs->at_encoding = a;
7381 break;
7382 case DW_AT_endianity:
7383 attrs->at_endianity = a;
7384 break;
7385 case DW_AT_explicit:
7386 attrs->at_explicit = a;
7387 break;
7388 case DW_AT_is_optional:
7389 attrs->at_is_optional = a;
7390 break;
7391 case DW_AT_location:
7392 attrs->at_location = a;
7393 break;
7394 case DW_AT_lower_bound:
7395 attrs->at_lower_bound = a;
7396 break;
7397 case DW_AT_mutable:
7398 attrs->at_mutable = a;
7399 break;
7400 case DW_AT_ordering:
7401 attrs->at_ordering = a;
7402 break;
7403 case DW_AT_picture_string:
7404 attrs->at_picture_string = a;
7405 break;
7406 case DW_AT_prototyped:
7407 attrs->at_prototyped = a;
7408 break;
7409 case DW_AT_small:
7410 attrs->at_small = a;
7411 break;
7412 case DW_AT_segment:
7413 attrs->at_segment = a;
7414 break;
7415 case DW_AT_string_length:
7416 attrs->at_string_length = a;
7417 break;
7418 case DW_AT_string_length_bit_size:
7419 attrs->at_string_length_bit_size = a;
7420 break;
7421 case DW_AT_string_length_byte_size:
7422 attrs->at_string_length_byte_size = a;
7423 break;
7424 case DW_AT_threads_scaled:
7425 attrs->at_threads_scaled = a;
7426 break;
7427 case DW_AT_upper_bound:
7428 attrs->at_upper_bound = a;
7429 break;
7430 case DW_AT_use_location:
7431 attrs->at_use_location = a;
7432 break;
7433 case DW_AT_use_UTF8:
7434 attrs->at_use_UTF8 = a;
7435 break;
7436 case DW_AT_variable_parameter:
7437 attrs->at_variable_parameter = a;
7438 break;
7439 case DW_AT_virtuality:
7440 attrs->at_virtuality = a;
7441 break;
7442 case DW_AT_visibility:
7443 attrs->at_visibility = a;
7444 break;
7445 case DW_AT_vtable_elem_location:
7446 attrs->at_vtable_elem_location = a;
7447 break;
7448 default:
7449 break;
7450 }
7451 }
7452 }
7453
7454 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7455
7456 static void
7457 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7458 {
7459 dw_die_ref c;
7460 dw_die_ref decl;
7461 struct checksum_attributes attrs;
7462
7463 CHECKSUM_ULEB128 ('D');
7464 CHECKSUM_ULEB128 (die->die_tag);
7465
7466 memset (&attrs, 0, sizeof (attrs));
7467
7468 decl = get_AT_ref (die, DW_AT_specification);
7469 if (decl != NULL)
7470 collect_checksum_attributes (&attrs, decl);
7471 collect_checksum_attributes (&attrs, die);
7472
7473 CHECKSUM_ATTR (attrs.at_name);
7474 CHECKSUM_ATTR (attrs.at_accessibility);
7475 CHECKSUM_ATTR (attrs.at_address_class);
7476 CHECKSUM_ATTR (attrs.at_allocated);
7477 CHECKSUM_ATTR (attrs.at_artificial);
7478 CHECKSUM_ATTR (attrs.at_associated);
7479 CHECKSUM_ATTR (attrs.at_binary_scale);
7480 CHECKSUM_ATTR (attrs.at_bit_offset);
7481 CHECKSUM_ATTR (attrs.at_bit_size);
7482 CHECKSUM_ATTR (attrs.at_bit_stride);
7483 CHECKSUM_ATTR (attrs.at_byte_size);
7484 CHECKSUM_ATTR (attrs.at_byte_stride);
7485 CHECKSUM_ATTR (attrs.at_const_value);
7486 CHECKSUM_ATTR (attrs.at_containing_type);
7487 CHECKSUM_ATTR (attrs.at_count);
7488 CHECKSUM_ATTR (attrs.at_data_location);
7489 CHECKSUM_ATTR (attrs.at_data_member_location);
7490 CHECKSUM_ATTR (attrs.at_decimal_scale);
7491 CHECKSUM_ATTR (attrs.at_decimal_sign);
7492 CHECKSUM_ATTR (attrs.at_default_value);
7493 CHECKSUM_ATTR (attrs.at_digit_count);
7494 CHECKSUM_ATTR (attrs.at_discr);
7495 CHECKSUM_ATTR (attrs.at_discr_list);
7496 CHECKSUM_ATTR (attrs.at_discr_value);
7497 CHECKSUM_ATTR (attrs.at_encoding);
7498 CHECKSUM_ATTR (attrs.at_endianity);
7499 CHECKSUM_ATTR (attrs.at_explicit);
7500 CHECKSUM_ATTR (attrs.at_is_optional);
7501 CHECKSUM_ATTR (attrs.at_location);
7502 CHECKSUM_ATTR (attrs.at_lower_bound);
7503 CHECKSUM_ATTR (attrs.at_mutable);
7504 CHECKSUM_ATTR (attrs.at_ordering);
7505 CHECKSUM_ATTR (attrs.at_picture_string);
7506 CHECKSUM_ATTR (attrs.at_prototyped);
7507 CHECKSUM_ATTR (attrs.at_small);
7508 CHECKSUM_ATTR (attrs.at_segment);
7509 CHECKSUM_ATTR (attrs.at_string_length);
7510 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7511 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7512 CHECKSUM_ATTR (attrs.at_threads_scaled);
7513 CHECKSUM_ATTR (attrs.at_upper_bound);
7514 CHECKSUM_ATTR (attrs.at_use_location);
7515 CHECKSUM_ATTR (attrs.at_use_UTF8);
7516 CHECKSUM_ATTR (attrs.at_variable_parameter);
7517 CHECKSUM_ATTR (attrs.at_virtuality);
7518 CHECKSUM_ATTR (attrs.at_visibility);
7519 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7520 CHECKSUM_ATTR (attrs.at_type);
7521 CHECKSUM_ATTR (attrs.at_friend);
7522 CHECKSUM_ATTR (attrs.at_alignment);
7523
7524 /* Checksum the child DIEs. */
7525 c = die->die_child;
7526 if (c) do {
7527 dw_attr_node *name_attr;
7528
7529 c = c->die_sib;
7530 name_attr = get_AT (c, DW_AT_name);
7531 if (is_template_instantiation (c))
7532 {
7533 /* Ignore instantiations of member type and function templates. */
7534 }
7535 else if (name_attr != NULL
7536 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7537 {
7538 /* Use a shallow checksum for named nested types and member
7539 functions. */
7540 CHECKSUM_ULEB128 ('S');
7541 CHECKSUM_ULEB128 (c->die_tag);
7542 CHECKSUM_STRING (AT_string (name_attr));
7543 }
7544 else
7545 {
7546 /* Use a deep checksum for other children. */
7547 /* Mark this DIE so it gets processed when unmarking. */
7548 if (c->die_mark == 0)
7549 c->die_mark = -1;
7550 die_checksum_ordered (c, ctx, mark);
7551 }
7552 } while (c != die->die_child);
7553
7554 CHECKSUM_ULEB128 (0);
7555 }
7556
7557 /* Add a type name and tag to a hash. */
7558 static void
7559 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7560 {
7561 CHECKSUM_ULEB128 (tag);
7562 CHECKSUM_STRING (name);
7563 }
7564
7565 #undef CHECKSUM
7566 #undef CHECKSUM_STRING
7567 #undef CHECKSUM_ATTR
7568 #undef CHECKSUM_SLEB128
7569 #undef CHECKSUM_ULEB128
7570
7571 /* Generate the type signature for DIE. This is computed by generating an
7572 MD5 checksum over the DIE's tag, its relevant attributes, and its
7573 children. Attributes that are references to other DIEs are processed
7574 by recursion, using the MARK field to prevent infinite recursion.
7575 If the DIE is nested inside a namespace or another type, we also
7576 need to include that context in the signature.  The final eight bytes
7577 (64 bits) of the resulting MD5 checksum comprise the signature. */
7578
7579 static void
7580 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7581 {
7582 int mark;
7583 const char *name;
7584 unsigned char checksum[16];
7585 struct md5_ctx ctx;
7586 dw_die_ref decl;
7587 dw_die_ref parent;
7588
7589 name = get_AT_string (die, DW_AT_name);
7590 decl = get_AT_ref (die, DW_AT_specification);
7591 parent = get_die_parent (die);
7592
7593 /* First, compute a signature for just the type name (and its surrounding
7594 context, if any).  This is stored in the type unit DIE for link-time
7595 ODR (one-definition rule) checking. */
7596
7597 if (is_cxx () && name != NULL)
7598 {
7599 md5_init_ctx (&ctx);
7600
7601 /* Checksum the names of surrounding namespaces and structures. */
7602 if (parent != NULL)
7603 checksum_die_context (parent, &ctx);
7604
7605 /* Checksum the current DIE. */
7606 die_odr_checksum (die->die_tag, name, &ctx);
7607 md5_finish_ctx (&ctx, checksum);
7608
7609 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7610 }
7611
7612 /* Next, compute the complete type signature. */
7613
7614 md5_init_ctx (&ctx);
7615 mark = 1;
7616 die->die_mark = mark;
7617
7618 /* Checksum the names of surrounding namespaces and structures. */
7619 if (parent != NULL)
7620 checksum_die_context (parent, &ctx);
7621
7622 /* Checksum the DIE and its children. */
7623 die_checksum_ordered (die, &ctx, &mark);
7624 unmark_all_dies (die);
7625 md5_finish_ctx (&ctx, checksum);
7626
7627 /* Store the signature in the type node and link the type DIE and the
7628 type node together. */
7629 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7630 DWARF_TYPE_SIGNATURE_SIZE);
7631 die->comdat_type_p = true;
7632 die->die_id.die_type_node = type_node;
7633 type_node->type_die = die;
7634
7635 /* If the DIE is a specification, link its declaration to the type node
7636 as well. */
7637 if (decl != NULL)
7638 {
7639 decl->comdat_type_p = true;
7640 decl->die_id.die_type_node = type_node;
7641 }
7642 }
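
/* Illustrative sketch (hypothetical helper, not used elsewhere in this
   file): assuming DWARF_TYPE_SIGNATURE_SIZE is 8, the signature stored
   above is the trailing eight bytes of the MD5 digest.  A consumer that
   wants the sig8 value as a single integer could pack those bytes roughly
   like this; the byte order chosen here is arbitrary and only for
   illustration.  */
#if 0 /* Example only.  */
static unsigned HOST_WIDE_INT
sketch_sig8_value (const unsigned char checksum[16])
{
  unsigned HOST_WIDE_INT sig = 0;
  for (int i = 16 - DWARF_TYPE_SIGNATURE_SIZE; i < 16; i++)
    sig = (sig << 8) | checksum[i];
  return sig;
}
#endif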
7643
7644 /* Do the location expressions look the same? */
7645 static inline int
7646 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7647 {
7648 return loc1->dw_loc_opc == loc2->dw_loc_opc
7649 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7650 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7651 }
7652
7653 /* Do the values look the same? */
7654 static int
7655 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7656 {
7657 dw_loc_descr_ref loc1, loc2;
7658 rtx r1, r2;
7659
7660 if (v1->val_class != v2->val_class)
7661 return 0;
7662
7663 switch (v1->val_class)
7664 {
7665 case dw_val_class_const:
7666 case dw_val_class_const_implicit:
7667 return v1->v.val_int == v2->v.val_int;
7668 case dw_val_class_unsigned_const:
7669 case dw_val_class_unsigned_const_implicit:
7670 return v1->v.val_unsigned == v2->v.val_unsigned;
7671 case dw_val_class_const_double:
7672 return v1->v.val_double.high == v2->v.val_double.high
7673 && v1->v.val_double.low == v2->v.val_double.low;
7674 case dw_val_class_wide_int:
7675 return *v1->v.val_wide == *v2->v.val_wide;
7676 case dw_val_class_vec:
7677 if (v1->v.val_vec.length != v2->v.val_vec.length
7678 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7679 return 0;
7680 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7681 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7682 return 0;
7683 return 1;
7684 case dw_val_class_flag:
7685 return v1->v.val_flag == v2->v.val_flag;
7686 case dw_val_class_str:
7687 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7688
7689 case dw_val_class_addr:
7690 r1 = v1->v.val_addr;
7691 r2 = v2->v.val_addr;
7692 if (GET_CODE (r1) != GET_CODE (r2))
7693 return 0;
7694 return rtx_equal_p (r1, r2);
7695
7696 case dw_val_class_offset:
7697 return v1->v.val_offset == v2->v.val_offset;
7698
7699 case dw_val_class_loc:
7700 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7701 loc1 && loc2;
7702 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7703 if (!same_loc_p (loc1, loc2, mark))
7704 return 0;
7705 return !loc1 && !loc2;
7706
7707 case dw_val_class_die_ref:
7708 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7709
7710 case dw_val_class_symview:
7711 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7712
7713 case dw_val_class_fde_ref:
7714 case dw_val_class_vms_delta:
7715 case dw_val_class_lbl_id:
7716 case dw_val_class_lineptr:
7717 case dw_val_class_macptr:
7718 case dw_val_class_loclistsptr:
7719 case dw_val_class_high_pc:
7720 return 1;
7721
7722 case dw_val_class_file:
7723 case dw_val_class_file_implicit:
7724 return v1->v.val_file == v2->v.val_file;
7725
7726 case dw_val_class_data8:
7727 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7728
7729 default:
7730 return 1;
7731 }
7732 }
7733
7734 /* Do the attributes look the same? */
7735
7736 static int
7737 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7738 {
7739 if (at1->dw_attr != at2->dw_attr)
7740 return 0;
7741
7742 /* We don't care that this was compiled with a different compiler
7743 snapshot; if the output is the same, that's what matters. */
7744 if (at1->dw_attr == DW_AT_producer)
7745 return 1;
7746
7747 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7748 }
7749
7750 /* Do the dies look the same? */
7751
7752 static int
7753 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7754 {
7755 dw_die_ref c1, c2;
7756 dw_attr_node *a1;
7757 unsigned ix;
7758
7759 /* To avoid infinite recursion. */
7760 if (die1->die_mark)
7761 return die1->die_mark == die2->die_mark;
7762 die1->die_mark = die2->die_mark = ++(*mark);
7763
7764 if (die1->die_tag != die2->die_tag)
7765 return 0;
7766
7767 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7768 return 0;
7769
7770 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7771 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7772 return 0;
7773
7774 c1 = die1->die_child;
7775 c2 = die2->die_child;
7776 if (! c1)
7777 {
7778 if (c2)
7779 return 0;
7780 }
7781 else
7782 for (;;)
7783 {
7784 if (!same_die_p (c1, c2, mark))
7785 return 0;
7786 c1 = c1->die_sib;
7787 c2 = c2->die_sib;
7788 if (c1 == die1->die_child)
7789 {
7790 if (c2 == die2->die_child)
7791 break;
7792 else
7793 return 0;
7794 }
7795 }
7796
7797 return 1;
7798 }
7799
7800 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7801 children, and set die_symbol. */
7802
7803 static void
7804 compute_comp_unit_symbol (dw_die_ref unit_die)
7805 {
7806 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7807 const char *base = die_name ? lbasename (die_name) : "anonymous";
7808 char *name = XALLOCAVEC (char, strlen (base) + 64);
7809 char *p;
7810 int i, mark;
7811 unsigned char checksum[16];
7812 struct md5_ctx ctx;
7813
7814 /* Compute the checksum of the DIE, then append part of it as hex digits to
7815 the base filename of the unit. */
7816
7817 md5_init_ctx (&ctx);
7818 mark = 0;
7819 die_checksum (unit_die, &ctx, &mark);
7820 unmark_all_dies (unit_die);
7821 md5_finish_ctx (&ctx, checksum);
7822
7823 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7824 not start with a letter but with anything valid for filenames and
7825 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7826 character is not a letter. */
7827 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7828 clean_symbol_name (name);
7829
7830 p = name + strlen (name);
7831 for (i = 0; i < 4; i++)
7832 {
7833 sprintf (p, "%.2x", checksum[i]);
7834 p += 2;
7835 }
7836
7837 unit_die->die_id.die_symbol = xstrdup (name);
7838 }
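
/* Worked example (illustration only, with a made-up digest): a unit whose
   DW_AT_name is "src/foo.c" has base name "foo.c"; if the checksum began
   with the bytes de ad be ef, the resulting die_symbol would be roughly
   "foo_c_deadbeef", assuming clean_symbol_name maps the dots to
   underscores.  */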
7839
7840 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7841
7842 static int
7843 is_type_die (dw_die_ref die)
7844 {
7845 switch (die->die_tag)
7846 {
7847 case DW_TAG_array_type:
7848 case DW_TAG_class_type:
7849 case DW_TAG_interface_type:
7850 case DW_TAG_enumeration_type:
7851 case DW_TAG_pointer_type:
7852 case DW_TAG_reference_type:
7853 case DW_TAG_rvalue_reference_type:
7854 case DW_TAG_string_type:
7855 case DW_TAG_structure_type:
7856 case DW_TAG_subroutine_type:
7857 case DW_TAG_union_type:
7858 case DW_TAG_ptr_to_member_type:
7859 case DW_TAG_set_type:
7860 case DW_TAG_subrange_type:
7861 case DW_TAG_base_type:
7862 case DW_TAG_const_type:
7863 case DW_TAG_file_type:
7864 case DW_TAG_packed_type:
7865 case DW_TAG_volatile_type:
7866 case DW_TAG_typedef:
7867 return 1;
7868 default:
7869 return 0;
7870 }
7871 }
7872
7873 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7874 Basically, we want to choose the bits that are likely to be shared between
7875 compilations (types) and leave out the bits that are specific to individual
7876 compilations (functions). */
7877
7878 static int
7879 is_comdat_die (dw_die_ref c)
7880 {
7881 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7882 we do for stabs. The advantage is a greater likelihood of sharing between
7883 objects that don't include headers in the same order (and therefore would
7884 put the base types in a different comdat). jason 8/28/00 */
7885
7886 if (c->die_tag == DW_TAG_base_type)
7887 return 0;
7888
7889 if (c->die_tag == DW_TAG_pointer_type
7890 || c->die_tag == DW_TAG_reference_type
7891 || c->die_tag == DW_TAG_rvalue_reference_type
7892 || c->die_tag == DW_TAG_const_type
7893 || c->die_tag == DW_TAG_volatile_type)
7894 {
7895 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7896
7897 return t ? is_comdat_die (t) : 0;
7898 }
7899
7900 return is_type_die (c);
7901 }
7902
7903 /* Returns true iff C is a compile-unit DIE. */
7904
7905 static inline bool
7906 is_cu_die (dw_die_ref c)
7907 {
7908 return c && (c->die_tag == DW_TAG_compile_unit
7909 || c->die_tag == DW_TAG_skeleton_unit);
7910 }
7911
7912 /* Returns true iff C is a unit DIE of some sort. */
7913
7914 static inline bool
7915 is_unit_die (dw_die_ref c)
7916 {
7917 return c && (c->die_tag == DW_TAG_compile_unit
7918 || c->die_tag == DW_TAG_partial_unit
7919 || c->die_tag == DW_TAG_type_unit
7920 || c->die_tag == DW_TAG_skeleton_unit);
7921 }
7922
7923 /* Returns true iff C is a namespace DIE. */
7924
7925 static inline bool
7926 is_namespace_die (dw_die_ref c)
7927 {
7928 return c && c->die_tag == DW_TAG_namespace;
7929 }
7930
7931 /* Returns true iff C is a class or structure DIE. */
7932
7933 static inline bool
7934 is_class_die (dw_die_ref c)
7935 {
7936 return c && (c->die_tag == DW_TAG_class_type
7937 || c->die_tag == DW_TAG_structure_type);
7938 }
7939
7940 /* Return non-zero if this DIE is a template parameter. */
7941
7942 static inline bool
7943 is_template_parameter (dw_die_ref die)
7944 {
7945 switch (die->die_tag)
7946 {
7947 case DW_TAG_template_type_param:
7948 case DW_TAG_template_value_param:
7949 case DW_TAG_GNU_template_template_param:
7950 case DW_TAG_GNU_template_parameter_pack:
7951 return true;
7952 default:
7953 return false;
7954 }
7955 }
7956
7957 /* Return non-zero if this DIE represents a template instantiation. */
7958
7959 static inline bool
7960 is_template_instantiation (dw_die_ref die)
7961 {
7962 dw_die_ref c;
7963
7964 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7965 return false;
7966 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7967 return false;
7968 }
7969
7970 static char *
7971 gen_internal_sym (const char *prefix)
7972 {
7973 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7974
7975 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7976 return xstrdup (buf);
7977 }
7978
7979 /* Return non-zero if this DIE is a declaration. */
7980
7981 static int
7982 is_declaration_die (dw_die_ref die)
7983 {
7984 dw_attr_node *a;
7985 unsigned ix;
7986
7987 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7988 if (a->dw_attr == DW_AT_declaration)
7989 return 1;
7990
7991 return 0;
7992 }
7993
7994 /* Return non-zero if this DIE is nested inside a subprogram. */
7995
7996 static int
7997 is_nested_in_subprogram (dw_die_ref die)
7998 {
7999 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
8000
8001 if (decl == NULL)
8002 decl = die;
8003 return local_scope_p (decl);
8004 }
8005
8006 /* Return non-zero if this DIE contains a defining declaration of a
8007 subprogram. */
8008
8009 static int
8010 contains_subprogram_definition (dw_die_ref die)
8011 {
8012 dw_die_ref c;
8013
8014 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8015 return 1;
8016 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8017 return 0;
8018 }
8019
8020 /* Return non-zero if this is a type DIE that should be moved to a
8021 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8022 unit type. */
8023
8024 static int
8025 should_move_die_to_comdat (dw_die_ref die)
8026 {
8027 switch (die->die_tag)
8028 {
8029 case DW_TAG_class_type:
8030 case DW_TAG_structure_type:
8031 case DW_TAG_enumeration_type:
8032 case DW_TAG_union_type:
8033 /* Don't move declarations, inlined instances, types nested in a
8034 subprogram, or types that contain subprogram definitions. */
8035 if (is_declaration_die (die)
8036 || get_AT (die, DW_AT_abstract_origin)
8037 || is_nested_in_subprogram (die)
8038 || contains_subprogram_definition (die))
8039 return 0;
8040 return 1;
8041 case DW_TAG_array_type:
8042 case DW_TAG_interface_type:
8043 case DW_TAG_pointer_type:
8044 case DW_TAG_reference_type:
8045 case DW_TAG_rvalue_reference_type:
8046 case DW_TAG_string_type:
8047 case DW_TAG_subroutine_type:
8048 case DW_TAG_ptr_to_member_type:
8049 case DW_TAG_set_type:
8050 case DW_TAG_subrange_type:
8051 case DW_TAG_base_type:
8052 case DW_TAG_const_type:
8053 case DW_TAG_file_type:
8054 case DW_TAG_packed_type:
8055 case DW_TAG_volatile_type:
8056 case DW_TAG_typedef:
8057 default:
8058 return 0;
8059 }
8060 }
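
/* Worked example (illustration only): when type units are enabled
   (e.g. with -fdebug-types-section), a namespace-scope type such as
     struct Outer { struct Inner { int i; }; int j; };
   is eligible to move to a COMDAT type unit (and Inner is broken out
   separately by break_out_comdat_types below), whereas
     void f (void) { struct Local { int i; } l; (void) l; }
   keeps Local in the main CU because it is nested in a subprogram.  */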
8061
8062 /* Make a clone of DIE. */
8063
8064 static dw_die_ref
8065 clone_die (dw_die_ref die)
8066 {
8067 dw_die_ref clone = new_die_raw (die->die_tag);
8068 dw_attr_node *a;
8069 unsigned ix;
8070
8071 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8072 add_dwarf_attr (clone, a);
8073
8074 return clone;
8075 }
8076
8077 /* Make a clone of the tree rooted at DIE. */
8078
8079 static dw_die_ref
8080 clone_tree (dw_die_ref die)
8081 {
8082 dw_die_ref c;
8083 dw_die_ref clone = clone_die (die);
8084
8085 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8086
8087 return clone;
8088 }
8089
8090 /* Make a clone of DIE as a declaration. */
8091
8092 static dw_die_ref
8093 clone_as_declaration (dw_die_ref die)
8094 {
8095 dw_die_ref clone;
8096 dw_die_ref decl;
8097 dw_attr_node *a;
8098 unsigned ix;
8099
8100 /* If the DIE is already a declaration, just clone it. */
8101 if (is_declaration_die (die))
8102 return clone_die (die);
8103
8104 /* If the DIE is a specification, just clone its declaration DIE. */
8105 decl = get_AT_ref (die, DW_AT_specification);
8106 if (decl != NULL)
8107 {
8108 clone = clone_die (decl);
8109 if (die->comdat_type_p)
8110 add_AT_die_ref (clone, DW_AT_signature, die);
8111 return clone;
8112 }
8113
8114 clone = new_die_raw (die->die_tag);
8115
8116 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8117 {
8118 /* We don't want to copy over all attributes.
8119 For example we don't want DW_AT_byte_size because otherwise we will no
8120 longer have a declaration and GDB will treat it as a definition. */
8121
8122 switch (a->dw_attr)
8123 {
8124 case DW_AT_abstract_origin:
8125 case DW_AT_artificial:
8126 case DW_AT_containing_type:
8127 case DW_AT_external:
8128 case DW_AT_name:
8129 case DW_AT_type:
8130 case DW_AT_virtuality:
8131 case DW_AT_linkage_name:
8132 case DW_AT_MIPS_linkage_name:
8133 add_dwarf_attr (clone, a);
8134 break;
8135 case DW_AT_byte_size:
8136 case DW_AT_alignment:
8137 default:
8138 break;
8139 }
8140 }
8141
8142 if (die->comdat_type_p)
8143 add_AT_die_ref (clone, DW_AT_signature, die);
8144
8145 add_AT_flag (clone, DW_AT_declaration, 1);
8146 return clone;
8147 }
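
/* Worked example (illustration only): a definition DIE such as
     DW_TAG_structure_type
       DW_AT_name      "S"
       DW_AT_byte_size 4
       DW_AT_alignment 4
   is cloned here as the bare declaration
     DW_TAG_structure_type
       DW_AT_name        "S"
       DW_AT_declaration 1
   (plus DW_AT_signature when the original is a comdat type); keeping
   DW_AT_byte_size would make debuggers treat the clone as a
   definition.  */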
8148
8149
8150 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8151
8152 struct decl_table_entry
8153 {
8154 dw_die_ref orig;
8155 dw_die_ref copy;
8156 };
8157
8158 /* Helpers to manipulate hash table of copied declarations. */
8159
8160 /* Hashtable helpers. */
8161
8162 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8163 {
8164 typedef die_struct *compare_type;
8165 static inline hashval_t hash (const decl_table_entry *);
8166 static inline bool equal (const decl_table_entry *, const die_struct *);
8167 };
8168
8169 inline hashval_t
8170 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8171 {
8172 return htab_hash_pointer (entry->orig);
8173 }
8174
8175 inline bool
8176 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8177 const die_struct *entry2)
8178 {
8179 return entry1->orig == entry2;
8180 }
8181
8182 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8183
8184 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8185 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8186 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8187 to check if the ancestor has already been copied into UNIT. */
8188
8189 static dw_die_ref
8190 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8191 decl_hash_type *decl_table)
8192 {
8193 dw_die_ref parent = die->die_parent;
8194 dw_die_ref new_parent = unit;
8195 dw_die_ref copy;
8196 decl_table_entry **slot = NULL;
8197 struct decl_table_entry *entry = NULL;
8198
8199 if (decl_table)
8200 {
8201 /* Check if the entry has already been copied to UNIT. */
8202 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8203 INSERT);
8204 if (*slot != HTAB_EMPTY_ENTRY)
8205 {
8206 entry = *slot;
8207 return entry->copy;
8208 }
8209
8210 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8211 entry = XCNEW (struct decl_table_entry);
8212 entry->orig = die;
8213 entry->copy = NULL;
8214 *slot = entry;
8215 }
8216
8217 if (parent != NULL)
8218 {
8219 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8220 if (spec != NULL)
8221 parent = spec;
8222 if (!is_unit_die (parent))
8223 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8224 }
8225
8226 copy = clone_as_declaration (die);
8227 add_child_die (new_parent, copy);
8228
8229 if (decl_table)
8230 {
8231 /* Record the pointer to the copy. */
8232 entry->copy = copy;
8233 }
8234
8235 return copy;
8236 }
8237 /* Copy the declaration context to the new type unit DIE. This includes
8238 any surrounding namespace or type declarations. If the DIE has an
8239 AT_specification attribute, it also includes attributes and children
8240 attached to the specification, and returns a pointer to the original
8241 parent of the declaration DIE. Returns NULL otherwise. */
8242
8243 static dw_die_ref
8244 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8245 {
8246 dw_die_ref decl;
8247 dw_die_ref new_decl;
8248 dw_die_ref orig_parent = NULL;
8249
8250 decl = get_AT_ref (die, DW_AT_specification);
8251 if (decl == NULL)
8252 decl = die;
8253 else
8254 {
8255 unsigned ix;
8256 dw_die_ref c;
8257 dw_attr_node *a;
8258
8259 /* The original DIE will be changed to a declaration, and must
8260 be moved to be a child of the original declaration DIE. */
8261 orig_parent = decl->die_parent;
8262
8263 /* Copy the type node pointer from the new DIE to the original
8264 declaration DIE so we can forward references later. */
8265 decl->comdat_type_p = true;
8266 decl->die_id.die_type_node = die->die_id.die_type_node;
8267
8268 remove_AT (die, DW_AT_specification);
8269
8270 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8271 {
8272 if (a->dw_attr != DW_AT_name
8273 && a->dw_attr != DW_AT_declaration
8274 && a->dw_attr != DW_AT_external)
8275 add_dwarf_attr (die, a);
8276 }
8277
8278 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8279 }
8280
8281 if (decl->die_parent != NULL
8282 && !is_unit_die (decl->die_parent))
8283 {
8284 new_decl = copy_ancestor_tree (unit, decl, NULL);
8285 if (new_decl != NULL)
8286 {
8287 remove_AT (new_decl, DW_AT_signature);
8288 add_AT_specification (die, new_decl);
8289 }
8290 }
8291
8292 return orig_parent;
8293 }
8294
8295 /* Generate the skeleton ancestor tree for the given NODE, then clone
8296 the DIE and add the clone into the tree. */
8297
8298 static void
8299 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8300 {
8301 if (node->new_die != NULL)
8302 return;
8303
8304 node->new_die = clone_as_declaration (node->old_die);
8305
8306 if (node->parent != NULL)
8307 {
8308 generate_skeleton_ancestor_tree (node->parent);
8309 add_child_die (node->parent->new_die, node->new_die);
8310 }
8311 }
8312
8313 /* Generate a skeleton tree of DIEs containing any declarations that are
8314 found in the original tree. We traverse the tree looking for declaration
8315 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8316
8317 static void
8318 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8319 {
8320 skeleton_chain_node node;
8321 dw_die_ref c;
8322 dw_die_ref first;
8323 dw_die_ref prev = NULL;
8324 dw_die_ref next = NULL;
8325
8326 node.parent = parent;
8327
8328 first = c = parent->old_die->die_child;
8329 if (c)
8330 next = c->die_sib;
8331 if (c) do {
8332 if (prev == NULL || prev->die_sib == c)
8333 prev = c;
8334 c = next;
8335 next = (c == first ? NULL : c->die_sib);
8336 node.old_die = c;
8337 node.new_die = NULL;
8338 if (is_declaration_die (c))
8339 {
8340 if (is_template_instantiation (c))
8341 {
8342 /* Instantiated templates do not need to be cloned into the
8343 type unit. Just move the DIE and its children back to
8344 the skeleton tree (in the main CU). */
8345 remove_child_with_prev (c, prev);
8346 add_child_die (parent->new_die, c);
8347 c = prev;
8348 }
8349 else if (c->comdat_type_p)
8350 {
8351 /* This is the skeleton left by an earlier break_out_comdat_types
8352 pass for this type.  Clone the existing DIE, but keep the children
8353 under the original (which is in the main CU). */
8354 dw_die_ref clone = clone_die (c);
8355
8356 replace_child (c, clone, prev);
8357 generate_skeleton_ancestor_tree (parent);
8358 add_child_die (parent->new_die, c);
8359 c = clone;
8360 continue;
8361 }
8362 else
8363 {
8364 /* Clone the existing DIE, move the original to the skeleton
8365 tree (which is in the main CU), and put the clone, with
8366 all the original's children, where the original came from
8367 (which is about to be moved to the type unit). */
8368 dw_die_ref clone = clone_die (c);
8369 move_all_children (c, clone);
8370
8371 /* If the original has a DW_AT_object_pointer attribute,
8372 it would now point to a child DIE just moved to the
8373 cloned tree, so we need to remove that attribute from
8374 the original. */
8375 remove_AT (c, DW_AT_object_pointer);
8376
8377 replace_child (c, clone, prev);
8378 generate_skeleton_ancestor_tree (parent);
8379 add_child_die (parent->new_die, c);
8380 node.old_die = clone;
8381 node.new_die = c;
8382 c = clone;
8383 }
8384 }
8385 generate_skeleton_bottom_up (&node);
8386 } while (next != NULL);
8387 }
8388
8389 /* Wrapper function for generate_skeleton_bottom_up. */
8390
8391 static dw_die_ref
8392 generate_skeleton (dw_die_ref die)
8393 {
8394 skeleton_chain_node node;
8395
8396 node.old_die = die;
8397 node.new_die = NULL;
8398 node.parent = NULL;
8399
8400 /* If this type definition is nested inside another type,
8401 and is not an instantiation of a template, always leave
8402 at least a declaration in its place. */
8403 if (die->die_parent != NULL
8404 && is_type_die (die->die_parent)
8405 && !is_template_instantiation (die))
8406 node.new_die = clone_as_declaration (die);
8407
8408 generate_skeleton_bottom_up (&node);
8409 return node.new_die;
8410 }
8411
8412 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8413 declaration. The original DIE is moved to a new compile unit so that
8414 existing references to it follow it to the new location. If any of the
8415 original DIE's descendants is a declaration, we need to replace the
8416 original DIE with a skeleton tree and move the declarations back into the
8417 skeleton tree. */
8418
8419 static dw_die_ref
8420 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8421 dw_die_ref prev)
8422 {
8423 dw_die_ref skeleton, orig_parent;
8424
8425 /* Copy the declaration context to the type unit DIE. If the returned
8426 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8427 that DIE. */
8428 orig_parent = copy_declaration_context (unit, child);
8429
8430 skeleton = generate_skeleton (child);
8431 if (skeleton == NULL)
8432 remove_child_with_prev (child, prev);
8433 else
8434 {
8435 skeleton->comdat_type_p = true;
8436 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8437
8438 /* If the original DIE was a specification, we need to put
8439 the skeleton under the parent DIE of the declaration.
8440 This leaves the original declaration in the tree, but
8441 it will be pruned later since there are no longer any
8442 references to it. */
8443 if (orig_parent != NULL)
8444 {
8445 remove_child_with_prev (child, prev);
8446 add_child_die (orig_parent, skeleton);
8447 }
8448 else
8449 replace_child (child, skeleton, prev);
8450 }
8451
8452 return skeleton;
8453 }
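
/* Worked example (illustration only): for
     struct S { struct Nested { int i; }; void m (); };
   moving S to a type unit leaves behind a skeleton in the main CU that
   contains roughly
     DW_TAG_structure_type "S" with DW_AT_declaration and DW_AT_signature
       DW_TAG_subprogram "m" (declaration)
   so that DIEs in the main CU (for example, the out-of-line definition of
   m via DW_AT_specification) can still refer to these locally, while the
   full type definition lives in the COMDAT type unit.  */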
8454
8455 static void
8456 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8457 comdat_type_node *type_node,
8458 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8459
8460 /* Helper for copy_dwarf_procs_ref_in_dies.  Make a copy of DIE, a DWARF
8461 procedure, put it under TYPE_NODE and return the copy.  Continue looking for
8462 DWARF procedure references in the DW_AT_location attribute. */
8463
8464 static dw_die_ref
8465 copy_dwarf_procedure (dw_die_ref die,
8466 comdat_type_node *type_node,
8467 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8468 {
8469 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8470
8471 /* DWARF procedures are not supposed to have children... */
8472 gcc_assert (die->die_child == NULL);
8473
8474 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8475 gcc_assert (vec_safe_length (die->die_attr) == 1
8476 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8477
8478 /* Do not copy DWARF procedures more than once. */
8479 bool existed;
8480 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8481 if (existed)
8482 return die_copy;
8483
8484 die_copy = clone_die (die);
8485 add_child_die (type_node->root_die, die_copy);
8486 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8487 return die_copy;
8488 }
8489
8490 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8491 procedures in DIE's attributes. */
8492
8493 static void
8494 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8495 comdat_type_node *type_node,
8496 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8497 {
8498 dw_attr_node *a;
8499 unsigned i;
8500
8501 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8502 {
8503 dw_loc_descr_ref loc;
8504
8505 if (a->dw_attr_val.val_class != dw_val_class_loc)
8506 continue;
8507
8508 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8509 {
8510 switch (loc->dw_loc_opc)
8511 {
8512 case DW_OP_call2:
8513 case DW_OP_call4:
8514 case DW_OP_call_ref:
8515 gcc_assert (loc->dw_loc_oprnd1.val_class
8516 == dw_val_class_die_ref);
8517 loc->dw_loc_oprnd1.v.val_die_ref.die
8518 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8519 type_node,
8520 copied_dwarf_procs);
8521
8522 default:
8523 break;
8524 }
8525 }
8526 }
8527 }
8528
8529 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
8530 rewrite references to point to the copies.
8531
8532 References are looked for in DIE's attributes and recursively in all its
8533 children attributes that are location descriptions. COPIED_DWARF_PROCS is a
8534 mapping from old DWARF procedures to their copies.  It is used to avoid
8535 copying the same DWARF procedure twice under TYPE_NODE. */
8536
8537 static void
8538 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8539 comdat_type_node *type_node,
8540 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8541 {
8542 dw_die_ref c;
8543
8544 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8545 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8546 type_node,
8547 copied_dwarf_procs));
8548 }
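
/* Worked example (illustration only): a dynamically sized type may
   describe its size with an expression such as
     DW_AT_byte_size: DW_OP_push_object_address; DW_OP_call4 <R>
   where <R> is a DW_TAG_dwarf_procedure DIE holding the actual size
   computation.  When the type moves to its own type unit, the DW_OP_call4
   operand must be redirected to a copy of <R> in that same unit, which is
   what the helpers above do.  */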
8549
8550 /* Traverse the DIE and set up additional .debug_types or .debug_info
8551 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8552 section. */
8553
8554 static void
8555 break_out_comdat_types (dw_die_ref die)
8556 {
8557 dw_die_ref c;
8558 dw_die_ref first;
8559 dw_die_ref prev = NULL;
8560 dw_die_ref next = NULL;
8561 dw_die_ref unit = NULL;
8562
8563 first = c = die->die_child;
8564 if (c)
8565 next = c->die_sib;
8566 if (c) do {
8567 if (prev == NULL || prev->die_sib == c)
8568 prev = c;
8569 c = next;
8570 next = (c == first ? NULL : c->die_sib);
8571 if (should_move_die_to_comdat (c))
8572 {
8573 dw_die_ref replacement;
8574 comdat_type_node *type_node;
8575
8576 /* Break out nested types into their own type units. */
8577 break_out_comdat_types (c);
8578
8579 /* Create a new type unit DIE as the root for the new tree, and
8580 add it to the list of comdat types. */
8581 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8582 add_AT_unsigned (unit, DW_AT_language,
8583 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8584 type_node = ggc_cleared_alloc<comdat_type_node> ();
8585 type_node->root_die = unit;
8586 type_node->next = comdat_type_list;
8587 comdat_type_list = type_node;
8588
8589 /* Generate the type signature. */
8590 generate_type_signature (c, type_node);
8591
8592 /* Copy the declaration context, attributes, and children of the
8593 declaration into the new type unit DIE, then remove this DIE
8594 from the main CU (or replace it with a skeleton if necessary). */
8595 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8596 type_node->skeleton_die = replacement;
8597
8598 /* Add the DIE to the new compunit. */
8599 add_child_die (unit, c);
8600
8601 /* Types can reference DWARF procedures for type size or data location
8602 expressions. Calls in DWARF expressions cannot target procedures
8603 that are not in the same section. So we must copy DWARF procedures
8604 along with this type and then rewrite references to them. */
8605 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8606 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8607
8608 if (replacement != NULL)
8609 c = replacement;
8610 }
8611 else if (c->die_tag == DW_TAG_namespace
8612 || c->die_tag == DW_TAG_class_type
8613 || c->die_tag == DW_TAG_structure_type
8614 || c->die_tag == DW_TAG_union_type)
8615 {
8616 /* Look for nested types that can be broken out. */
8617 break_out_comdat_types (c);
8618 }
8619 } while (next != NULL);
8620 }
8621
8622 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8623 Enter all the cloned children into the hash table decl_table. */
8624
8625 static dw_die_ref
8626 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8627 {
8628 dw_die_ref c;
8629 dw_die_ref clone;
8630 struct decl_table_entry *entry;
8631 decl_table_entry **slot;
8632
8633 if (die->die_tag == DW_TAG_subprogram)
8634 clone = clone_as_declaration (die);
8635 else
8636 clone = clone_die (die);
8637
8638 slot = decl_table->find_slot_with_hash (die,
8639 htab_hash_pointer (die), INSERT);
8640
8641 /* Assert that DIE isn't in the hash table yet.  If it were already there,
8642 its ancestors would necessarily be there as well, and therefore
8643 clone_tree_partial wouldn't have been called. */
8644 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8645
8646 entry = XCNEW (struct decl_table_entry);
8647 entry->orig = die;
8648 entry->copy = clone;
8649 *slot = entry;
8650
8651 if (die->die_tag != DW_TAG_subprogram)
8652 FOR_EACH_CHILD (die, c,
8653 add_child_die (clone, clone_tree_partial (c, decl_table)));
8654
8655 return clone;
8656 }
8657
8658 /* Walk the DIE and its children, looking for references to incomplete
8659 or trivial types that are unmarked (i.e., that are not in the current
8660 type_unit). */
8661
8662 static void
8663 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8664 {
8665 dw_die_ref c;
8666 dw_attr_node *a;
8667 unsigned ix;
8668
8669 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8670 {
8671 if (AT_class (a) == dw_val_class_die_ref)
8672 {
8673 dw_die_ref targ = AT_ref (a);
8674 decl_table_entry **slot;
8675 struct decl_table_entry *entry;
8676
8677 if (targ->die_mark != 0 || targ->comdat_type_p)
8678 continue;
8679
8680 slot = decl_table->find_slot_with_hash (targ,
8681 htab_hash_pointer (targ),
8682 INSERT);
8683
8684 if (*slot != HTAB_EMPTY_ENTRY)
8685 {
8686 /* TARG has already been copied, so we just need to
8687 modify the reference to point to the copy. */
8688 entry = *slot;
8689 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8690 }
8691 else
8692 {
8693 dw_die_ref parent = unit;
8694 dw_die_ref copy = clone_die (targ);
8695
8696 /* Record in DECL_TABLE that TARG has been copied.
8697 Need to do this now, before the recursive call,
8698 because DECL_TABLE may be expanded and SLOT
8699 would no longer be a valid pointer. */
8700 entry = XCNEW (struct decl_table_entry);
8701 entry->orig = targ;
8702 entry->copy = copy;
8703 *slot = entry;
8704
8705 /* If TARG is not a declaration DIE, we need to copy its
8706 children. */
8707 if (!is_declaration_die (targ))
8708 {
8709 FOR_EACH_CHILD (
8710 targ, c,
8711 add_child_die (copy,
8712 clone_tree_partial (c, decl_table)));
8713 }
8714
8715 /* Make sure the cloned tree is marked as part of the
8716 type unit. */
8717 mark_dies (copy);
8718
8719 /* If TARG has surrounding context, copy its ancestor tree
8720 into the new type unit. */
8721 if (targ->die_parent != NULL
8722 && !is_unit_die (targ->die_parent))
8723 parent = copy_ancestor_tree (unit, targ->die_parent,
8724 decl_table);
8725
8726 add_child_die (parent, copy);
8727 a->dw_attr_val.v.val_die_ref.die = copy;
8728
8729 /* Make sure the newly-copied DIE is walked. If it was
8730 installed in a previously-added context, it won't
8731 get visited otherwise. */
8732 if (parent != unit)
8733 {
8734 /* Find the highest point of the newly-added tree,
8735 mark each node along the way, and walk from there. */
8736 parent->die_mark = 1;
8737 while (parent->die_parent
8738 && parent->die_parent->die_mark == 0)
8739 {
8740 parent = parent->die_parent;
8741 parent->die_mark = 1;
8742 }
8743 copy_decls_walk (unit, parent, decl_table);
8744 }
8745 }
8746 }
8747 }
8748
8749 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8750 }
8751
8752 /* Copy declarations for "unworthy" types into the new comdat section.
8753 Incomplete types, modified types, and certain other types aren't broken
8754 out into comdat sections of their own, so they don't have a signature,
8755 and we need to copy the declaration into the same section so that we
8756 don't have an external reference. */
8757
8758 static void
8759 copy_decls_for_unworthy_types (dw_die_ref unit)
8760 {
8761 mark_dies (unit);
8762 decl_hash_type decl_table (10);
8763 copy_decls_walk (unit, unit, &decl_table);
8764 unmark_dies (unit);
8765 }
8766
8767 /* Traverse the DIE and add a sibling attribute if it may have the
8768 effect of speeding up access to siblings. To save some space,
8769 avoid generating sibling attributes for DIEs without children. */
8770
8771 static void
8772 add_sibling_attributes (dw_die_ref die)
8773 {
8774 dw_die_ref c;
8775
8776 if (! die->die_child)
8777 return;
8778
8779 if (die->die_parent && die != die->die_parent->die_child)
8780 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8781
8782 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8783 }
8784
8785 /* Output all location lists for the DIE and its children. */
8786
8787 static void
8788 output_location_lists (dw_die_ref die)
8789 {
8790 dw_die_ref c;
8791 dw_attr_node *a;
8792 unsigned ix;
8793
8794 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8795 if (AT_class (a) == dw_val_class_loc_list)
8796 output_loc_list (AT_loc_list (a));
8797
8798 FOR_EACH_CHILD (die, c, output_location_lists (c));
8799 }
8800
8801 /* During assign_location_list_indexes and output_loclists_offsets this is
8802 the current index; afterwards it is the number of assigned indexes (i.e. how
8803 large the .debug_loclists* offset table should be). */
8804 static unsigned int loc_list_idx;
8805
8806 /* Output all location list offsets for the DIE and its children. */
8807
8808 static void
8809 output_loclists_offsets (dw_die_ref die)
8810 {
8811 dw_die_ref c;
8812 dw_attr_node *a;
8813 unsigned ix;
8814
8815 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8816 if (AT_class (a) == dw_val_class_loc_list)
8817 {
8818 dw_loc_list_ref l = AT_loc_list (a);
8819 if (l->offset_emitted)
8820 continue;
8821 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8822 loc_section_label, NULL);
8823 gcc_assert (l->hash == loc_list_idx);
8824 loc_list_idx++;
8825 l->offset_emitted = true;
8826 }
8827
8828 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8829 }
8830
8831 /* Recursively set indexes of location lists. */
8832
8833 static void
8834 assign_location_list_indexes (dw_die_ref die)
8835 {
8836 dw_die_ref c;
8837 dw_attr_node *a;
8838 unsigned ix;
8839
8840 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8841 if (AT_class (a) == dw_val_class_loc_list)
8842 {
8843 dw_loc_list_ref list = AT_loc_list (a);
8844 if (!list->num_assigned)
8845 {
8846 list->num_assigned = true;
8847 list->hash = loc_list_idx++;
8848 }
8849 }
8850
8851 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8852 }
8853
8854 /* We want to limit the number of external references, because they are
8855 larger than local references: a relocation takes multiple words, and
8856 even a sig8 reference is always eight bytes, whereas a local reference
8857 can be as small as one byte (GCC typically uses the four-byte DW_FORM_ref4).
8858 So if we encounter multiple external references to the same type DIE, we
8859 make a local typedef stub for it and redirect all references there.
8860
8861 This is the element of the hash table for keeping track of these
8862 references. */
8863
8864 struct external_ref
8865 {
8866 dw_die_ref type;
8867 dw_die_ref stub;
8868 unsigned n_refs;
8869 };
8870
8871 /* Hashtable helpers. */
8872
8873 struct external_ref_hasher : free_ptr_hash <external_ref>
8874 {
8875 static inline hashval_t hash (const external_ref *);
8876 static inline bool equal (const external_ref *, const external_ref *);
8877 };
8878
8879 inline hashval_t
8880 external_ref_hasher::hash (const external_ref *r)
8881 {
8882 dw_die_ref die = r->type;
8883 hashval_t h = 0;
8884
8885 /* We can't use the address of the DIE for hashing, because
8886 that will make the order of the stub DIEs non-deterministic. */
8887 if (! die->comdat_type_p)
8888 /* We have a symbol; use it to compute a hash. */
8889 h = htab_hash_string (die->die_id.die_symbol);
8890 else
8891 {
8892 /* We have a type signature; use a subset of the bits as the hash.
8893 The 8-byte signature is at least as large as hashval_t. */
8894 comdat_type_node *type_node = die->die_id.die_type_node;
8895 memcpy (&h, type_node->signature, sizeof (h));
8896 }
8897 return h;
8898 }
8899
8900 inline bool
8901 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8902 {
8903 return r1->type == r2->type;
8904 }
8905
8906 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8907
8908 /* Return a pointer to the external_ref for references to DIE. */
8909
8910 static struct external_ref *
8911 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8912 {
8913 struct external_ref ref, *ref_p;
8914 external_ref **slot;
8915
8916 ref.type = die;
8917 slot = map->find_slot (&ref, INSERT);
8918 if (*slot != HTAB_EMPTY_ENTRY)
8919 return *slot;
8920
8921 ref_p = XCNEW (struct external_ref);
8922 ref_p->type = die;
8923 *slot = ref_p;
8924 return ref_p;
8925 }
8926
8927 /* Subroutine of optimize_external_refs, below.
8928
8929 If we see a type skeleton, record it as our stub. If we see external
8930 references, remember how many we've seen. */
8931
8932 static void
8933 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8934 {
8935 dw_die_ref c;
8936 dw_attr_node *a;
8937 unsigned ix;
8938 struct external_ref *ref_p;
8939
8940 if (is_type_die (die)
8941 && (c = get_AT_ref (die, DW_AT_signature)))
8942 {
8943 /* This is a local skeleton; use it for local references. */
8944 ref_p = lookup_external_ref (map, c);
8945 ref_p->stub = die;
8946 }
8947
8948 /* Scan the DIE references, and remember any that refer to DIEs from
8949 other CUs (i.e. those which are not marked). */
8950 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8951 if (AT_class (a) == dw_val_class_die_ref
8952 && (c = AT_ref (a))->die_mark == 0
8953 && is_type_die (c))
8954 {
8955 ref_p = lookup_external_ref (map, c);
8956 ref_p->n_refs++;
8957 }
8958
8959 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8960 }
8961
8962 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8963 points to an external_ref, DATA is the CU we're processing. If we don't
8964 already have a local stub, and we have multiple refs, build a stub. */
8965
8966 int
8967 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8968 {
8969 struct external_ref *ref_p = *slot;
8970
8971 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8972 {
8973 /* We have multiple references to this type, so build a small stub.
8974 Both of these forms are a bit dodgy from the perspective of the
8975 DWARF standard, since technically they should have names. */
8976 dw_die_ref cu = data;
8977 dw_die_ref type = ref_p->type;
8978 dw_die_ref stub = NULL;
8979
8980 if (type->comdat_type_p)
8981 {
8982 /* If we refer to this type via sig8, use AT_signature. */
8983 stub = new_die (type->die_tag, cu, NULL_TREE);
8984 add_AT_die_ref (stub, DW_AT_signature, type);
8985 }
8986 else
8987 {
8988 /* Otherwise, use a typedef with no name. */
8989 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8990 add_AT_die_ref (stub, DW_AT_type, type);
8991 }
8992
8993 stub->die_mark++;
8994 ref_p->stub = stub;
8995 }
8996 return 1;
8997 }
8998
8999 /* DIE is a unit; look through all the DIE references to see if there are
9000 any external references to types, and if so, create local stubs for
9001 them which will be applied in build_abbrev_table. This is useful because
9002 references to local DIEs are smaller. */
9003
9004 static external_ref_hash_type *
9005 optimize_external_refs (dw_die_ref die)
9006 {
9007 external_ref_hash_type *map = new external_ref_hash_type (10);
9008 optimize_external_refs_1 (die, map);
9009 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9010 return map;
9011 }
9012
9013 /* The following 3 variables are temporaries that are computed only during the
9014 build_abbrev_table call and used and released during the following
9015 optimize_abbrev_table call. */
9016
9017 /* First abbrev_id that can be optimized based on usage. */
9018 static unsigned int abbrev_opt_start;
9019
9020 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9021 abbrev_id smaller than this, because they must be already sized
9022 during build_abbrev_table). */
9023 static unsigned int abbrev_opt_base_type_end;
9024
9025 /* Vector of usage counts during build_abbrev_table. Indexed by
9026 abbrev_id - abbrev_opt_start. */
9027 static vec<unsigned int> abbrev_usage_count;
9028
9029 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9030 static vec<dw_die_ref> sorted_abbrev_dies;
9031
9032 /* The format of each DIE (and its attribute value pairs) is encoded in an
9033 abbreviation table. This routine builds the abbreviation table and assigns
9034 a unique abbreviation id for each abbreviation entry. The children of each
9035 die are visited recursively. */
9036
9037 static void
9038 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9039 {
9040 unsigned int abbrev_id = 0;
9041 dw_die_ref c;
9042 dw_attr_node *a;
9043 unsigned ix;
9044 dw_die_ref abbrev;
9045
9046 /* Scan the DIE references, and replace any that refer to
9047 DIEs from other CUs (i.e. those which are not marked) with
9048 the local stubs we built in optimize_external_refs. */
9049 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9050 if (AT_class (a) == dw_val_class_die_ref
9051 && (c = AT_ref (a))->die_mark == 0)
9052 {
9053 struct external_ref *ref_p;
9054 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9055
9056 ref_p = lookup_external_ref (extern_map, c);
9057 if (ref_p->stub && ref_p->stub != die)
9058 change_AT_die_ref (a, ref_p->stub);
9059 else
9060 /* We aren't changing this reference, so mark it external. */
9061 set_AT_ref_external (a, 1);
9062 }
9063
9064 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9065 {
9066 dw_attr_node *die_a, *abbrev_a;
9067 unsigned ix;
9068 bool ok = true;
9069
9070 if (abbrev_id == 0)
9071 continue;
9072 if (abbrev->die_tag != die->die_tag)
9073 continue;
9074 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9075 continue;
9076
9077 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9078 continue;
9079
9080 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9081 {
9082 abbrev_a = &(*abbrev->die_attr)[ix];
9083 if ((abbrev_a->dw_attr != die_a->dw_attr)
9084 || (value_format (abbrev_a) != value_format (die_a)))
9085 {
9086 ok = false;
9087 break;
9088 }
9089 }
9090 if (ok)
9091 break;
9092 }
9093
9094 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9095 {
9096 vec_safe_push (abbrev_die_table, die);
9097 if (abbrev_opt_start)
9098 abbrev_usage_count.safe_push (0);
9099 }
9100 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9101 {
9102 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9103 sorted_abbrev_dies.safe_push (die);
9104 }
9105
9106 die->die_abbrev = abbrev_id;
9107 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9108 }
9109
9110 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9111 by die_abbrev's usage count, from the most commonly used
9112 abbreviation to the least. */
9113
9114 static int
9115 die_abbrev_cmp (const void *p1, const void *p2)
9116 {
9117 dw_die_ref die1 = *(const dw_die_ref *) p1;
9118 dw_die_ref die2 = *(const dw_die_ref *) p2;
9119
9120 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9121 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9122
9123 if (die1->die_abbrev >= abbrev_opt_base_type_end
9124 && die2->die_abbrev >= abbrev_opt_base_type_end)
9125 {
9126 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9127 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9128 return -1;
9129 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9130 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9131 return 1;
9132 }
9133
9134 /* Stabilize the sort. */
9135 if (die1->die_abbrev < die2->die_abbrev)
9136 return -1;
9137 if (die1->die_abbrev > die2->die_abbrev)
9138 return 1;
9139
9140 return 0;
9141 }
9142
9143 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9144 of DIEs between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9145 into dw_val_class_const_implicit or
9146 dw_val_class_unsigned_const_implicit. */
9147
9148 static void
9149 optimize_implicit_const (unsigned int first_id, unsigned int end,
9150 vec<bool> &implicit_consts)
9151 {
9152 /* It never makes sense if there is just one DIE using the abbreviation. */
9153 if (end < first_id + 2)
9154 return;
9155
9156 dw_attr_node *a;
9157 unsigned ix, i;
9158 dw_die_ref die = sorted_abbrev_dies[first_id];
9159 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9160 if (implicit_consts[ix])
9161 {
9162 enum dw_val_class new_class = dw_val_class_none;
9163 switch (AT_class (a))
9164 {
9165 case dw_val_class_unsigned_const:
9166 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9167 continue;
9168
9169 /* The .debug_abbrev section will grow by
9170 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9171 in all the DIEs using that abbreviation. */
9172 if (constant_size (AT_unsigned (a)) * (end - first_id)
9173 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9174 continue;
9175
9176 new_class = dw_val_class_unsigned_const_implicit;
9177 break;
9178
9179 case dw_val_class_const:
9180 new_class = dw_val_class_const_implicit;
9181 break;
9182
9183 case dw_val_class_file:
9184 new_class = dw_val_class_file_implicit;
9185 break;
9186
9187 default:
9188 continue;
9189 }
9190 for (i = first_id; i < end; i++)
9191 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9192 = new_class;
9193 }
9194 }
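
/* A small worked example, under -gdwarf-5: if 500 DIEs share one
   abbreviation and all carry DW_AT_decl_file with the same value 1,
   turning that attribute into dw_val_class_file_implicit stores the
   value once as an sleb128 in .debug_abbrev and drops the one-byte
   DW_FORM_data1 payload from every DIE, saving roughly 500 bytes of
   .debug_info for one extra byte of .debug_abbrev.  The numbers are
   only illustrative.  */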
9195
9196 /* Attempt to optimize the abbreviation table, considering only the
9197 abbreviations numbered abbrev_opt_start and above. */
9198
9199 static void
9200 optimize_abbrev_table (void)
9201 {
9202 if (abbrev_opt_start
9203 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9204 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9205 {
9206 auto_vec<bool, 32> implicit_consts;
9207 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9208
9209 unsigned int abbrev_id = abbrev_opt_start - 1;
9210 unsigned int first_id = ~0U;
9211 unsigned int last_abbrev_id = 0;
9212 unsigned int i;
9213 dw_die_ref die;
9214 if (abbrev_opt_base_type_end > abbrev_opt_start)
9215 abbrev_id = abbrev_opt_base_type_end - 1;
9216 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9217 most commonly used abbreviations come first. */
9218 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9219 {
9220 dw_attr_node *a;
9221 unsigned ix;
9222
9223 /* If calc_base_type_die_sizes has been called, the CU and
9224 base types after it can't be optimized, because we've already
9225 calculated their DIE offsets. We've sorted them first. */
9226 if (die->die_abbrev < abbrev_opt_base_type_end)
9227 continue;
9228 if (die->die_abbrev != last_abbrev_id)
9229 {
9230 last_abbrev_id = die->die_abbrev;
9231 if (dwarf_version >= 5 && first_id != ~0U)
9232 optimize_implicit_const (first_id, i, implicit_consts);
9233 abbrev_id++;
9234 (*abbrev_die_table)[abbrev_id] = die;
9235 if (dwarf_version >= 5)
9236 {
9237 first_id = i;
9238 implicit_consts.truncate (0);
9239
9240 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9241 switch (AT_class (a))
9242 {
9243 case dw_val_class_const:
9244 case dw_val_class_unsigned_const:
9245 case dw_val_class_file:
9246 implicit_consts.safe_push (true);
9247 break;
9248 default:
9249 implicit_consts.safe_push (false);
9250 break;
9251 }
9252 }
9253 }
9254 else if (dwarf_version >= 5)
9255 {
9256 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9257 if (!implicit_consts[ix])
9258 continue;
9259 else
9260 {
9261 dw_attr_node *other_a
9262 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9263 if (!dw_val_equal_p (&a->dw_attr_val,
9264 &other_a->dw_attr_val))
9265 implicit_consts[ix] = false;
9266 }
9267 }
9268 die->die_abbrev = abbrev_id;
9269 }
9270 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9271 if (dwarf_version >= 5 && first_id != ~0U)
9272 optimize_implicit_const (first_id, i, implicit_consts);
9273 }
9274
9275 abbrev_opt_start = 0;
9276 abbrev_opt_base_type_end = 0;
9277 abbrev_usage_count.release ();
9278 sorted_abbrev_dies.release ();
9279 }
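
/* Illustrative effect of the renumbering: abbreviation codes are emitted
   as uleb128, so codes 1-127 cost one byte per DIE and codes 128-16383
   cost two.  If an abbreviation used by 10000 DIEs happened to receive
   code 200 during construction, moving it below 128 saves about 10000
   bytes of .debug_info, while rarely used abbreviations are pushed to
   the larger codes.  The figures are only an example.  */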
9280 \f
9281 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9282
9283 static int
9284 constant_size (unsigned HOST_WIDE_INT value)
9285 {
9286 int log;
9287
9288 if (value == 0)
9289 log = 0;
9290 else
9291 log = floor_log2 (value);
9292
9293 log = log / 8;
9294 log = 1 << (floor_log2 (log) + 1);
9295
9296 return log;
9297 }
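
/* For example, constant_size (0) and constant_size (0xff) return 1,
   constant_size (0x100) and constant_size (0xffff) return 2,
   constant_size (0x10000) up to constant_size (0xffffffff) return 4,
   and anything larger returns 8, matching the DW_FORM_data1/2/4/8
   choices made in value_format below.  */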
9298
9299 /* Return the size of a DIE as it is represented in the
9300 .debug_info section. */
9301
9302 static unsigned long
9303 size_of_die (dw_die_ref die)
9304 {
9305 unsigned long size = 0;
9306 dw_attr_node *a;
9307 unsigned ix;
9308 enum dwarf_form form;
9309
9310 size += size_of_uleb128 (die->die_abbrev);
9311 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9312 {
9313 switch (AT_class (a))
9314 {
9315 case dw_val_class_addr:
9316 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9317 {
9318 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9319 size += size_of_uleb128 (AT_index (a));
9320 }
9321 else
9322 size += DWARF2_ADDR_SIZE;
9323 break;
9324 case dw_val_class_offset:
9325 size += DWARF_OFFSET_SIZE;
9326 break;
9327 case dw_val_class_loc:
9328 {
9329 unsigned long lsize = size_of_locs (AT_loc (a));
9330
9331 /* Block length. */
9332 if (dwarf_version >= 4)
9333 size += size_of_uleb128 (lsize);
9334 else
9335 size += constant_size (lsize);
9336 size += lsize;
9337 }
9338 break;
9339 case dw_val_class_loc_list:
9340 case dw_val_class_view_list:
9341 if (dwarf_split_debug_info && dwarf_version >= 5)
9342 {
9343 gcc_assert (AT_loc_list (a)->num_assigned);
9344 size += size_of_uleb128 (AT_loc_list (a)->hash);
9345 }
9346 else
9347 size += DWARF_OFFSET_SIZE;
9348 break;
9349 case dw_val_class_range_list:
9350 if (value_format (a) == DW_FORM_rnglistx)
9351 {
9352 gcc_assert (rnglist_idx);
9353 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9354 size += size_of_uleb128 (r->idx);
9355 }
9356 else
9357 size += DWARF_OFFSET_SIZE;
9358 break;
9359 case dw_val_class_const:
9360 size += size_of_sleb128 (AT_int (a));
9361 break;
9362 case dw_val_class_unsigned_const:
9363 {
9364 int csize = constant_size (AT_unsigned (a));
9365 if (dwarf_version == 3
9366 && a->dw_attr == DW_AT_data_member_location
9367 && csize >= 4)
9368 size += size_of_uleb128 (AT_unsigned (a));
9369 else
9370 size += csize;
9371 }
9372 break;
9373 case dw_val_class_symview:
9374 if (symview_upper_bound <= 0xff)
9375 size += 1;
9376 else if (symview_upper_bound <= 0xffff)
9377 size += 2;
9378 else if (symview_upper_bound <= 0xffffffff)
9379 size += 4;
9380 else
9381 size += 8;
9382 break;
9383 case dw_val_class_const_implicit:
9384 case dw_val_class_unsigned_const_implicit:
9385 case dw_val_class_file_implicit:
9386 /* These occupy no size in the DIE, just an extra sleb128 in
9387 .debug_abbrev. */
9388 break;
9389 case dw_val_class_const_double:
9390 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9391 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9392 size++; /* block */
9393 break;
9394 case dw_val_class_wide_int:
9395 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9396 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9397 if (get_full_len (*a->dw_attr_val.v.val_wide)
9398 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9399 size++; /* block */
9400 break;
9401 case dw_val_class_vec:
9402 size += constant_size (a->dw_attr_val.v.val_vec.length
9403 * a->dw_attr_val.v.val_vec.elt_size)
9404 + a->dw_attr_val.v.val_vec.length
9405 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9406 break;
9407 case dw_val_class_flag:
9408 if (dwarf_version >= 4)
9409 /* Currently all add_AT_flag calls pass in 1 as last argument,
9410 so DW_FORM_flag_present can be used. If that ever changes,
9411 we'll need to use DW_FORM_flag and have some optimization
9412 in build_abbrev_table that will change those to
9413 DW_FORM_flag_present if it is set to 1 in all DIEs using
9414 the same abbrev entry. */
9415 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9416 else
9417 size += 1;
9418 break;
9419 case dw_val_class_die_ref:
9420 if (AT_ref_external (a))
9421 {
9422 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9423 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9424 is sized by target address length, whereas in DWARF3
9425 it's always sized as an offset. */
9426 if (use_debug_types)
9427 size += DWARF_TYPE_SIGNATURE_SIZE;
9428 else if (dwarf_version == 2)
9429 size += DWARF2_ADDR_SIZE;
9430 else
9431 size += DWARF_OFFSET_SIZE;
9432 }
9433 else
9434 size += DWARF_OFFSET_SIZE;
9435 break;
9436 case dw_val_class_fde_ref:
9437 size += DWARF_OFFSET_SIZE;
9438 break;
9439 case dw_val_class_lbl_id:
9440 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9441 {
9442 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9443 size += size_of_uleb128 (AT_index (a));
9444 }
9445 else
9446 size += DWARF2_ADDR_SIZE;
9447 break;
9448 case dw_val_class_lineptr:
9449 case dw_val_class_macptr:
9450 case dw_val_class_loclistsptr:
9451 size += DWARF_OFFSET_SIZE;
9452 break;
9453 case dw_val_class_str:
9454 form = AT_string_form (a);
9455 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9456 size += DWARF_OFFSET_SIZE;
9457 else if (form == dwarf_FORM (DW_FORM_strx))
9458 size += size_of_uleb128 (AT_index (a));
9459 else
9460 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9461 break;
9462 case dw_val_class_file:
9463 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9464 break;
9465 case dw_val_class_data8:
9466 size += 8;
9467 break;
9468 case dw_val_class_vms_delta:
9469 size += DWARF_OFFSET_SIZE;
9470 break;
9471 case dw_val_class_high_pc:
9472 size += DWARF2_ADDR_SIZE;
9473 break;
9474 case dw_val_class_discr_value:
9475 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9476 break;
9477 case dw_val_class_discr_list:
9478 {
9479 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9480
9481 /* This is a block, so we have the block length and then its
9482 data. */
9483 size += constant_size (block_size) + block_size;
9484 }
9485 break;
9486 default:
9487 gcc_unreachable ();
9488 }
9489 }
9490
9491 return size;
9492 }
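
/* A minimal worked example, assuming 32-bit DWARF (DWARF_OFFSET_SIZE == 4):
   a DW_TAG_variable DIE with abbrev code 3, DW_AT_name as DW_FORM_strp,
   DW_AT_decl_file as DW_FORM_data1 and DW_AT_type as a local DW_FORM_ref4
   occupies 1 + 4 + 1 + 4 = 10 bytes of .debug_info.  The attribute set
   here is hypothetical.  */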
9493
9494 /* Size the debugging information associated with a given DIE. Visits the
9495 DIE's children recursively. Updates the global variable next_die_offset each
9496 time through. Uses the current value of next_die_offset to update the
9497 die_offset field in each DIE. */
9498
9499 static void
9500 calc_die_sizes (dw_die_ref die)
9501 {
9502 dw_die_ref c;
9503
9504 gcc_assert (die->die_offset == 0
9505 || (unsigned long int) die->die_offset == next_die_offset);
9506 die->die_offset = next_die_offset;
9507 next_die_offset += size_of_die (die);
9508
9509 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9510
9511 if (die->die_child != NULL)
9512 /* Count the null byte used to terminate sibling lists. */
9513 next_die_offset += 1;
9514 }
9515
9516 /* Size just the base type children at the start of the CU.
9517 This is needed because build_abbrev_table needs to size locs
9518 and sizing of type-based stack ops needs to know die_offset
9519 values for the base types. */
9520
9521 static void
9522 calc_base_type_die_sizes (void)
9523 {
9524 unsigned long die_offset = (dwarf_split_debug_info
9525 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9526 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9527 unsigned int i;
9528 dw_die_ref base_type;
9529 #if ENABLE_ASSERT_CHECKING
9530 dw_die_ref prev = comp_unit_die ()->die_child;
9531 #endif
9532
9533 die_offset += size_of_die (comp_unit_die ());
9534 for (i = 0; base_types.iterate (i, &base_type); i++)
9535 {
9536 #if ENABLE_ASSERT_CHECKING
9537 gcc_assert (base_type->die_offset == 0
9538 && prev->die_sib == base_type
9539 && base_type->die_child == NULL
9540 && base_type->die_abbrev);
9541 prev = base_type;
9542 #endif
9543 if (abbrev_opt_start
9544 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9545 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9546 base_type->die_offset = die_offset;
9547 die_offset += size_of_die (base_type);
9548 }
9549 }
9550
9551 /* Set the marks for a die and its children. We do this so
9552 that we know whether or not a reference needs to use FORM_ref_addr; only
9553 DIEs in the same CU will be marked. We used to clear out the offset
9554 and use that as the flag, but ran into ordering problems. */
9555
9556 static void
9557 mark_dies (dw_die_ref die)
9558 {
9559 dw_die_ref c;
9560
9561 gcc_assert (!die->die_mark);
9562
9563 die->die_mark = 1;
9564 FOR_EACH_CHILD (die, c, mark_dies (c));
9565 }
9566
9567 /* Clear the marks for a die and its children. */
9568
9569 static void
9570 unmark_dies (dw_die_ref die)
9571 {
9572 dw_die_ref c;
9573
9574 if (! use_debug_types)
9575 gcc_assert (die->die_mark);
9576
9577 die->die_mark = 0;
9578 FOR_EACH_CHILD (die, c, unmark_dies (c));
9579 }
9580
9581 /* Clear the marks for a die, its children and referred dies. */
9582
9583 static void
9584 unmark_all_dies (dw_die_ref die)
9585 {
9586 dw_die_ref c;
9587 dw_attr_node *a;
9588 unsigned ix;
9589
9590 if (!die->die_mark)
9591 return;
9592 die->die_mark = 0;
9593
9594 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9595
9596 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9597 if (AT_class (a) == dw_val_class_die_ref)
9598 unmark_all_dies (AT_ref (a));
9599 }
9600
9601 /* Calculate if the entry should appear in the final output file. It may be
9602 from a pruned type. */
9603
9604 static bool
9605 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9606 {
9607 /* By limiting gnu pubnames to definitions only, gold can generate a
9608 gdb index without entries for declarations, which don't include
9609 enough information to be useful. */
9610 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9611 return false;
9612
9613 if (table == pubname_table)
9614 {
9615 /* Enumerator names are part of the pubname table, but the
9616 parent DW_TAG_enumeration_type die may have been pruned.
9617 Don't output them if that is the case. */
9618 if (p->die->die_tag == DW_TAG_enumerator &&
9619 (p->die->die_parent == NULL
9620 || !p->die->die_parent->die_perennial_p))
9621 return false;
9622
9623 /* Everything else in the pubname table is included. */
9624 return true;
9625 }
9626
9627 /* The pubtypes table shouldn't include types that have been
9628 pruned. */
9629 return (p->die->die_offset != 0
9630 || !flag_eliminate_unused_debug_types);
9631 }
9632
9633 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9634 generated for the compilation unit. */
9635
9636 static unsigned long
9637 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9638 {
9639 unsigned long size;
9640 unsigned i;
9641 pubname_entry *p;
9642 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9643
9644 size = DWARF_PUBNAMES_HEADER_SIZE;
9645 FOR_EACH_VEC_ELT (*names, i, p)
9646 if (include_pubname_in_output (names, p))
9647 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9648
9649 size += DWARF_OFFSET_SIZE;
9650 return size;
9651 }
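
/* For example, with 32-bit DWARF a pubnames entry for "main" contributes
   4 (DIE offset) + strlen ("main") + 1 = 9 bytes, plus one flags byte
   when debug_generate_pub_sections == 2; the trailing DWARF_OFFSET_SIZE
   added above accounts for the zero word that terminates the table.  */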
9652
9653 /* Return the size of the information in the .debug_aranges section. */
9654
9655 static unsigned long
9656 size_of_aranges (void)
9657 {
9658 unsigned long size;
9659
9660 size = DWARF_ARANGES_HEADER_SIZE;
9661
9662 /* Count the address/length pair for this compilation unit. */
9663 if (text_section_used)
9664 size += 2 * DWARF2_ADDR_SIZE;
9665 if (cold_text_section_used)
9666 size += 2 * DWARF2_ADDR_SIZE;
9667 if (have_multiple_function_sections)
9668 {
9669 unsigned fde_idx;
9670 dw_fde_ref fde;
9671
9672 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9673 {
9674 if (DECL_IGNORED_P (fde->decl))
9675 continue;
9676 if (!fde->in_std_section)
9677 size += 2 * DWARF2_ADDR_SIZE;
9678 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9679 size += 2 * DWARF2_ADDR_SIZE;
9680 }
9681 }
9682
9683 /* Count the two zero words used to terminate the address range table. */
9684 size += 2 * DWARF2_ADDR_SIZE;
9685 return size;
9686 }
9687 \f
9688 /* Select the encoding of an attribute value. */
9689
9690 static enum dwarf_form
9691 value_format (dw_attr_node *a)
9692 {
9693 switch (AT_class (a))
9694 {
9695 case dw_val_class_addr:
9696 /* Only very few attributes allow DW_FORM_addr. */
9697 switch (a->dw_attr)
9698 {
9699 case DW_AT_low_pc:
9700 case DW_AT_high_pc:
9701 case DW_AT_entry_pc:
9702 case DW_AT_trampoline:
9703 return (AT_index (a) == NOT_INDEXED
9704 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9705 default:
9706 break;
9707 }
9708 switch (DWARF2_ADDR_SIZE)
9709 {
9710 case 1:
9711 return DW_FORM_data1;
9712 case 2:
9713 return DW_FORM_data2;
9714 case 4:
9715 return DW_FORM_data4;
9716 case 8:
9717 return DW_FORM_data8;
9718 default:
9719 gcc_unreachable ();
9720 }
9721 case dw_val_class_loc_list:
9722 case dw_val_class_view_list:
9723 if (dwarf_split_debug_info
9724 && dwarf_version >= 5
9725 && AT_loc_list (a)->num_assigned)
9726 return DW_FORM_loclistx;
9727 /* FALLTHRU */
9728 case dw_val_class_range_list:
9729 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9730 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9731 care about sizes of .debug* sections in shared libraries and
9732 executables and don't take into account relocations that affect just
9733 relocatable objects - for DW_FORM_rnglistx we'd have to emit offset
9734 table in the .debug_rnglists section. */
9735 if (dwarf_split_debug_info
9736 && dwarf_version >= 5
9737 && AT_class (a) == dw_val_class_range_list
9738 && rnglist_idx
9739 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9740 return DW_FORM_rnglistx;
9741 if (dwarf_version >= 4)
9742 return DW_FORM_sec_offset;
9743 /* FALLTHRU */
9744 case dw_val_class_vms_delta:
9745 case dw_val_class_offset:
9746 switch (DWARF_OFFSET_SIZE)
9747 {
9748 case 4:
9749 return DW_FORM_data4;
9750 case 8:
9751 return DW_FORM_data8;
9752 default:
9753 gcc_unreachable ();
9754 }
9755 case dw_val_class_loc:
9756 if (dwarf_version >= 4)
9757 return DW_FORM_exprloc;
9758 switch (constant_size (size_of_locs (AT_loc (a))))
9759 {
9760 case 1:
9761 return DW_FORM_block1;
9762 case 2:
9763 return DW_FORM_block2;
9764 case 4:
9765 return DW_FORM_block4;
9766 default:
9767 gcc_unreachable ();
9768 }
9769 case dw_val_class_const:
9770 return DW_FORM_sdata;
9771 case dw_val_class_unsigned_const:
9772 switch (constant_size (AT_unsigned (a)))
9773 {
9774 case 1:
9775 return DW_FORM_data1;
9776 case 2:
9777 return DW_FORM_data2;
9778 case 4:
9779 /* In DWARF3 DW_AT_data_member_location with
9780 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9781 constant, so we need to use DW_FORM_udata if we need
9782 a large constant. */
9783 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9784 return DW_FORM_udata;
9785 return DW_FORM_data4;
9786 case 8:
9787 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9788 return DW_FORM_udata;
9789 return DW_FORM_data8;
9790 default:
9791 gcc_unreachable ();
9792 }
9793 case dw_val_class_const_implicit:
9794 case dw_val_class_unsigned_const_implicit:
9795 case dw_val_class_file_implicit:
9796 return DW_FORM_implicit_const;
9797 case dw_val_class_const_double:
9798 switch (HOST_BITS_PER_WIDE_INT)
9799 {
9800 case 8:
9801 return DW_FORM_data2;
9802 case 16:
9803 return DW_FORM_data4;
9804 case 32:
9805 return DW_FORM_data8;
9806 case 64:
9807 if (dwarf_version >= 5)
9808 return DW_FORM_data16;
9809 /* FALLTHRU */
9810 default:
9811 return DW_FORM_block1;
9812 }
9813 case dw_val_class_wide_int:
9814 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9815 {
9816 case 8:
9817 return DW_FORM_data1;
9818 case 16:
9819 return DW_FORM_data2;
9820 case 32:
9821 return DW_FORM_data4;
9822 case 64:
9823 return DW_FORM_data8;
9824 case 128:
9825 if (dwarf_version >= 5)
9826 return DW_FORM_data16;
9827 /* FALLTHRU */
9828 default:
9829 return DW_FORM_block1;
9830 }
9831 case dw_val_class_symview:
9832 /* ??? We might use uleb128, but then we'd have to compute
9833 .debug_info offsets in the assembler. */
9834 if (symview_upper_bound <= 0xff)
9835 return DW_FORM_data1;
9836 else if (symview_upper_bound <= 0xffff)
9837 return DW_FORM_data2;
9838 else if (symview_upper_bound <= 0xffffffff)
9839 return DW_FORM_data4;
9840 else
9841 return DW_FORM_data8;
9842 case dw_val_class_vec:
9843 switch (constant_size (a->dw_attr_val.v.val_vec.length
9844 * a->dw_attr_val.v.val_vec.elt_size))
9845 {
9846 case 1:
9847 return DW_FORM_block1;
9848 case 2:
9849 return DW_FORM_block2;
9850 case 4:
9851 return DW_FORM_block4;
9852 default:
9853 gcc_unreachable ();
9854 }
9855 case dw_val_class_flag:
9856 if (dwarf_version >= 4)
9857 {
9858 /* Currently all add_AT_flag calls pass in 1 as last argument,
9859 so DW_FORM_flag_present can be used. If that ever changes,
9860 we'll need to use DW_FORM_flag and have some optimization
9861 in build_abbrev_table that will change those to
9862 DW_FORM_flag_present if it is set to 1 in all DIEs using
9863 the same abbrev entry. */
9864 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9865 return DW_FORM_flag_present;
9866 }
9867 return DW_FORM_flag;
9868 case dw_val_class_die_ref:
9869 if (AT_ref_external (a))
9870 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9871 else
9872 return DW_FORM_ref;
9873 case dw_val_class_fde_ref:
9874 return DW_FORM_data;
9875 case dw_val_class_lbl_id:
9876 return (AT_index (a) == NOT_INDEXED
9877 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9878 case dw_val_class_lineptr:
9879 case dw_val_class_macptr:
9880 case dw_val_class_loclistsptr:
9881 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9882 case dw_val_class_str:
9883 return AT_string_form (a);
9884 case dw_val_class_file:
9885 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9886 {
9887 case 1:
9888 return DW_FORM_data1;
9889 case 2:
9890 return DW_FORM_data2;
9891 case 4:
9892 return DW_FORM_data4;
9893 default:
9894 gcc_unreachable ();
9895 }
9896
9897 case dw_val_class_data8:
9898 return DW_FORM_data8;
9899
9900 case dw_val_class_high_pc:
9901 switch (DWARF2_ADDR_SIZE)
9902 {
9903 case 1:
9904 return DW_FORM_data1;
9905 case 2:
9906 return DW_FORM_data2;
9907 case 4:
9908 return DW_FORM_data4;
9909 case 8:
9910 return DW_FORM_data8;
9911 default:
9912 gcc_unreachable ();
9913 }
9914
9915 case dw_val_class_discr_value:
9916 return (a->dw_attr_val.v.val_discr_value.pos
9917 ? DW_FORM_udata
9918 : DW_FORM_sdata);
9919 case dw_val_class_discr_list:
9920 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9921 {
9922 case 1:
9923 return DW_FORM_block1;
9924 case 2:
9925 return DW_FORM_block2;
9926 case 4:
9927 return DW_FORM_block4;
9928 default:
9929 gcc_unreachable ();
9930 }
9931
9932 default:
9933 gcc_unreachable ();
9934 }
9935 }
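
/* For instance, an unsigned constant attribute with value 300 has
   constant_size == 2 and is thus encoded as DW_FORM_data2, whereas
   DW_AT_data_member_location needing 4 or more bytes is forced to
   DW_FORM_udata under -gdwarf-3, because DWARF 3 would interpret
   DW_FORM_data4/data8 there as a loclistptr rather than a constant.  */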
9936
9937 /* Output the encoding of an attribute value. */
9938
9939 static void
9940 output_value_format (dw_attr_node *a)
9941 {
9942 enum dwarf_form form = value_format (a);
9943
9944 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9945 }
9946
9947 /* Given a die and id, produce the appropriate abbreviations. */
9948
9949 static void
9950 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9951 {
9952 unsigned ix;
9953 dw_attr_node *a_attr;
9954
9955 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9956 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9957 dwarf_tag_name (abbrev->die_tag));
9958
9959 if (abbrev->die_child != NULL)
9960 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9961 else
9962 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9963
9964 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9965 {
9966 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9967 dwarf_attr_name (a_attr->dw_attr));
9968 output_value_format (a_attr);
9969 if (value_format (a_attr) == DW_FORM_implicit_const)
9970 {
9971 if (AT_class (a_attr) == dw_val_class_file_implicit)
9972 {
9973 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9974 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9975 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9976 }
9977 else
9978 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9979 }
9980 }
9981
9982 dw2_asm_output_data (1, 0, NULL);
9983 dw2_asm_output_data (1, 0, NULL);
9984 }
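
/* The bytes emitted for one abbreviation might look like the following
   (a hypothetical DW_TAG_variable with two attributes, 32-bit DWARF):

     .uleb128 0x2    (abbrev code)
     .uleb128 0x34   (TAG: DW_TAG_variable)
     .byte 0         DW_children_no
     .uleb128 0x3    (DW_AT_name)
     .uleb128 0xe    (DW_FORM_strp)
     .uleb128 0x49   (DW_AT_type)
     .uleb128 0x13   (DW_FORM_ref4)
     .byte 0         terminator
     .byte 0

   The abbrev code and attribute set are just an example.  */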
9985
9986
9987 /* Output the .debug_abbrev section which defines the DIE abbreviation
9988 table. */
9989
9990 static void
9991 output_abbrev_section (void)
9992 {
9993 unsigned int abbrev_id;
9994 dw_die_ref abbrev;
9995
9996 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9997 if (abbrev_id != 0)
9998 output_die_abbrevs (abbrev_id, abbrev);
9999
10000 /* Terminate the table. */
10001 dw2_asm_output_data (1, 0, NULL);
10002 }
10003
10004 /* Return a new location list, given the begin and end range, and the
10005 expression. */
10006
10007 static inline dw_loc_list_ref
10008 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10009 const char *end, var_loc_view vend,
10010 const char *section)
10011 {
10012 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10013
10014 retlist->begin = begin;
10015 retlist->begin_entry = NULL;
10016 retlist->end = end;
10017 retlist->expr = expr;
10018 retlist->section = section;
10019 retlist->vbegin = vbegin;
10020 retlist->vend = vend;
10021
10022 return retlist;
10023 }
10024
10025 /* Return true iff there's any nonzero view number in the loc list.
10026
10027 ??? When views are not enabled, we'll often extend a single range
10028 to the entire function, so that we emit a single location
10029 expression rather than a location list. With views, even with a
10030 single range, we'll output a list if start or end have a nonzero
10031 view. If we change this, we may want to stop splitting a single
10032 range in dw_loc_list just because of a nonzero view, even if it
10033 straddles across hot/cold partitions. */
10034
10035 static bool
10036 loc_list_has_views (dw_loc_list_ref list)
10037 {
10038 if (!debug_variable_location_views)
10039 return false;
10040
10041 for (dw_loc_list_ref loc = list;
10042 loc != NULL; loc = loc->dw_loc_next)
10043 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10044 return true;
10045
10046 return false;
10047 }
10048
10049 /* Generate a new internal symbol for this location list node, if it
10050 hasn't got one yet. */
10051
10052 static inline void
10053 gen_llsym (dw_loc_list_ref list)
10054 {
10055 gcc_assert (!list->ll_symbol);
10056 list->ll_symbol = gen_internal_sym ("LLST");
10057
10058 if (!loc_list_has_views (list))
10059 return;
10060
10061 if (dwarf2out_locviews_in_attribute ())
10062 {
10063 /* Use the same label_num for the view list. */
10064 label_num--;
10065 list->vl_symbol = gen_internal_sym ("LVUS");
10066 }
10067 else
10068 list->vl_symbol = list->ll_symbol;
10069 }
10070
10071 /* Generate a symbol for the list, but only if we really want to emit
10072 it as a list. */
10073
10074 static inline void
10075 maybe_gen_llsym (dw_loc_list_ref list)
10076 {
10077 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10078 return;
10079
10080 gen_llsym (list);
10081 }
10082
10083 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10084 NULL, don't consider size of the location expression. If we're not
10085 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10086 representation in *SIZEP. */
10087
10088 static bool
10089 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10090 {
10091 /* Don't output an entry that starts and ends at the same address. */
10092 if (strcmp (curr->begin, curr->end) == 0
10093 && curr->vbegin == curr->vend && !curr->force)
10094 return true;
10095
10096 if (!sizep)
10097 return false;
10098
10099 unsigned long size = size_of_locs (curr->expr);
10100
10101 /* If the expression is too large, drop it on the floor. We could
10102 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10103 in the expression, but >= 64KB expressions for a single value
10104 in a single range are unlikely to be very useful. */
10105 if (dwarf_version < 5 && size > 0xffff)
10106 return true;
10107
10108 *sizep = size;
10109
10110 return false;
10111 }
10112
10113 /* Output a view pair loclist entry for CURR, if it requires one. */
10114
10115 static void
10116 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10117 {
10118 if (!dwarf2out_locviews_in_loclist ())
10119 return;
10120
10121 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10122 return;
10123
10124 #ifdef DW_LLE_view_pair
10125 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10126
10127 if (dwarf2out_as_locview_support)
10128 {
10129 if (ZERO_VIEW_P (curr->vbegin))
10130 dw2_asm_output_data_uleb128 (0, "Location view begin");
10131 else
10132 {
10133 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10134 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10135 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10136 }
10137
10138 if (ZERO_VIEW_P (curr->vend))
10139 dw2_asm_output_data_uleb128 (0, "Location view end");
10140 else
10141 {
10142 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10143 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10144 dw2_asm_output_symname_uleb128 (label, "Location view end");
10145 }
10146 }
10147 else
10148 {
10149 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10150 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10151 }
10152 #endif /* DW_LLE_view_pair */
10153
10154 return;
10155 }
10156
10157 /* Output the location list given to us. */
10158
10159 static void
10160 output_loc_list (dw_loc_list_ref list_head)
10161 {
10162 int vcount = 0, lcount = 0;
10163
10164 if (list_head->emitted)
10165 return;
10166 list_head->emitted = true;
10167
10168 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10169 {
10170 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10171
10172 for (dw_loc_list_ref curr = list_head; curr != NULL;
10173 curr = curr->dw_loc_next)
10174 {
10175 unsigned long size;
10176
10177 if (skip_loc_list_entry (curr, &size))
10178 continue;
10179
10180 vcount++;
10181
10182 /* ?? dwarf_split_debug_info? */
10183 if (dwarf2out_as_locview_support)
10184 {
10185 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10186
10187 if (!ZERO_VIEW_P (curr->vbegin))
10188 {
10189 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10190 dw2_asm_output_symname_uleb128 (label,
10191 "View list begin (%s)",
10192 list_head->vl_symbol);
10193 }
10194 else
10195 dw2_asm_output_data_uleb128 (0,
10196 "View list begin (%s)",
10197 list_head->vl_symbol);
10198
10199 if (!ZERO_VIEW_P (curr->vend))
10200 {
10201 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10202 dw2_asm_output_symname_uleb128 (label,
10203 "View list end (%s)",
10204 list_head->vl_symbol);
10205 }
10206 else
10207 dw2_asm_output_data_uleb128 (0,
10208 "View list end (%s)",
10209 list_head->vl_symbol);
10210 }
10211 else
10212 {
10213 dw2_asm_output_data_uleb128 (curr->vbegin,
10214 "View list begin (%s)",
10215 list_head->vl_symbol);
10216 dw2_asm_output_data_uleb128 (curr->vend,
10217 "View list end (%s)",
10218 list_head->vl_symbol);
10219 }
10220 }
10221 }
10222
10223 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10224
10225 const char *last_section = NULL;
10226 const char *base_label = NULL;
10227
10228 /* Walk the location list, and output each range + expression. */
10229 for (dw_loc_list_ref curr = list_head; curr != NULL;
10230 curr = curr->dw_loc_next)
10231 {
10232 unsigned long size;
10233
10234 /* Skip this entry? If we skip it here, we must skip it in the
10235 view list above as well. */
10236 if (skip_loc_list_entry (curr, &size))
10237 continue;
10238
10239 lcount++;
10240
10241 if (dwarf_version >= 5)
10242 {
10243 if (dwarf_split_debug_info)
10244 {
10245 dwarf2out_maybe_output_loclist_view_pair (curr);
10246 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10247 uleb128 index into .debug_addr and uleb128 length. */
10248 dw2_asm_output_data (1, DW_LLE_startx_length,
10249 "DW_LLE_startx_length (%s)",
10250 list_head->ll_symbol);
10251 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10252 "Location list range start index "
10253 "(%s)", curr->begin);
10254 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10255 For that case we probably need to emit DW_LLE_startx_endx,
10256 but we'd need 2 .debug_addr entries rather than just one. */
10257 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10258 "Location list length (%s)",
10259 list_head->ll_symbol);
10260 }
10261 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10262 {
10263 dwarf2out_maybe_output_loclist_view_pair (curr);
10264 /* If all code is in .text section, the base address is
10265 already provided by the CU attributes. Use
10266 DW_LLE_offset_pair where both addresses are uleb128 encoded
10267 offsets against that base. */
10268 dw2_asm_output_data (1, DW_LLE_offset_pair,
10269 "DW_LLE_offset_pair (%s)",
10270 list_head->ll_symbol);
10271 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10272 "Location list begin address (%s)",
10273 list_head->ll_symbol);
10274 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10275 "Location list end address (%s)",
10276 list_head->ll_symbol);
10277 }
10278 else if (HAVE_AS_LEB128)
10279 {
10280 /* Otherwise, find out how many consecutive entries could share
10281 the same base entry. If just one, emit DW_LLE_start_length,
10282 otherwise emit DW_LLE_base_address for the base address
10283 followed by a series of DW_LLE_offset_pair. */
10284 if (last_section == NULL || curr->section != last_section)
10285 {
10286 dw_loc_list_ref curr2;
10287 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10288 curr2 = curr2->dw_loc_next)
10289 {
10290 if (strcmp (curr2->begin, curr2->end) == 0
10291 && !curr2->force)
10292 continue;
10293 break;
10294 }
10295 if (curr2 == NULL || curr->section != curr2->section)
10296 last_section = NULL;
10297 else
10298 {
10299 last_section = curr->section;
10300 base_label = curr->begin;
10301 dw2_asm_output_data (1, DW_LLE_base_address,
10302 "DW_LLE_base_address (%s)",
10303 list_head->ll_symbol);
10304 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10305 "Base address (%s)",
10306 list_head->ll_symbol);
10307 }
10308 }
10309 /* Only one entry with the same base address. Use
10310 DW_LLE_start_length with absolute address and uleb128
10311 length. */
10312 if (last_section == NULL)
10313 {
10314 dwarf2out_maybe_output_loclist_view_pair (curr);
10315 dw2_asm_output_data (1, DW_LLE_start_length,
10316 "DW_LLE_start_length (%s)",
10317 list_head->ll_symbol);
10318 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10319 "Location list begin address (%s)",
10320 list_head->ll_symbol);
10321 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10322 "Location list length "
10323 "(%s)", list_head->ll_symbol);
10324 }
10325 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10326 DW_LLE_base_address. */
10327 else
10328 {
10329 dwarf2out_maybe_output_loclist_view_pair (curr);
10330 dw2_asm_output_data (1, DW_LLE_offset_pair,
10331 "DW_LLE_offset_pair (%s)",
10332 list_head->ll_symbol);
10333 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10334 "Location list begin address "
10335 "(%s)", list_head->ll_symbol);
10336 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10337 "Location list end address "
10338 "(%s)", list_head->ll_symbol);
10339 }
10340 }
10341 /* The assembler does not support the .uleb128 directive. Emit
10342 DW_LLE_start_end with a pair of absolute addresses. */
10343 else
10344 {
10345 dwarf2out_maybe_output_loclist_view_pair (curr);
10346 dw2_asm_output_data (1, DW_LLE_start_end,
10347 "DW_LLE_start_end (%s)",
10348 list_head->ll_symbol);
10349 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10350 "Location list begin address (%s)",
10351 list_head->ll_symbol);
10352 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10353 "Location list end address (%s)",
10354 list_head->ll_symbol);
10355 }
10356 }
10357 else if (dwarf_split_debug_info)
10358 {
10359 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10360 and a 4-byte length. */
10361 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10362 "Location list start/length entry (%s)",
10363 list_head->ll_symbol);
10364 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10365 "Location list range start index (%s)",
10366 curr->begin);
10367 /* The length field is 4 bytes. If we ever need to support
10368 an 8-byte length, we can add a new DW_LLE code or fall back
10369 to DW_LLE_GNU_start_end_entry. */
10370 dw2_asm_output_delta (4, curr->end, curr->begin,
10371 "Location list range length (%s)",
10372 list_head->ll_symbol);
10373 }
10374 else if (!have_multiple_function_sections)
10375 {
10376 /* Pair of relative addresses against start of text section. */
10377 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10378 "Location list begin address (%s)",
10379 list_head->ll_symbol);
10380 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10381 "Location list end address (%s)",
10382 list_head->ll_symbol);
10383 }
10384 else
10385 {
10386 /* Pair of absolute addresses. */
10387 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10388 "Location list begin address (%s)",
10389 list_head->ll_symbol);
10390 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10391 "Location list end address (%s)",
10392 list_head->ll_symbol);
10393 }
10394
10395 /* Output the block length for this list of location operations. */
10396 if (dwarf_version >= 5)
10397 dw2_asm_output_data_uleb128 (size, "Location expression size");
10398 else
10399 {
10400 gcc_assert (size <= 0xffff);
10401 dw2_asm_output_data (2, size, "Location expression size");
10402 }
10403
10404 output_loc_sequence (curr->expr, -1);
10405 }
10406
10407 /* And finally list termination. */
10408 if (dwarf_version >= 5)
10409 dw2_asm_output_data (1, DW_LLE_end_of_list,
10410 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10411 else if (dwarf_split_debug_info)
10412 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10413 "Location list terminator (%s)",
10414 list_head->ll_symbol);
10415 else
10416 {
10417 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10418 "Location list terminator begin (%s)",
10419 list_head->ll_symbol);
10420 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10421 "Location list terminator end (%s)",
10422 list_head->ll_symbol);
10423 }
10424
10425 gcc_assert (!list_head->vl_symbol
10426 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10427 }
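
/* As a sketch, the DWARF 5 single-text-section path above emits entries
   along these lines (label names are placeholders):

     .byte 0x4                  DW_LLE_offset_pair
     .uleb128 .LVL1 - .Ltext0   Location list begin address
     .uleb128 .LVL2 - .Ltext0   Location list end address
     .uleb128 0x1               Location expression size
     .byte 0x50                 DW_OP_reg0
     ...
     .byte 0                    DW_LLE_end_of_list

   i.e. each range is a pair of uleb128 offsets against the base address
   the CU attributes already provide, followed by the uleb128-sized
   expression block.  */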
10428
10429 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10430 section. Emit a relocated reference if val_entry is NULL, otherwise,
10431 emit an indirect reference. */
10432
10433 static void
10434 output_range_list_offset (dw_attr_node *a)
10435 {
10436 const char *name = dwarf_attr_name (a->dw_attr);
10437
10438 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10439 {
10440 if (dwarf_version >= 5)
10441 {
10442 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10443 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10444 debug_ranges_section, "%s", name);
10445 }
10446 else
10447 {
10448 char *p = strchr (ranges_section_label, '\0');
10449 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10450 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10451 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10452 debug_ranges_section, "%s", name);
10453 *p = '\0';
10454 }
10455 }
10456 else if (dwarf_version >= 5)
10457 {
10458 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10459 gcc_assert (rnglist_idx);
10460 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10461 }
10462 else
10463 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10464 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10465 "%s (offset from %s)", name, ranges_section_label);
10466 }
10467
10468 /* Output the offset into the debug_loc section. */
10469
10470 static void
10471 output_loc_list_offset (dw_attr_node *a)
10472 {
10473 char *sym = AT_loc_list (a)->ll_symbol;
10474
10475 gcc_assert (sym);
10476 if (!dwarf_split_debug_info)
10477 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10478 "%s", dwarf_attr_name (a->dw_attr));
10479 else if (dwarf_version >= 5)
10480 {
10481 gcc_assert (AT_loc_list (a)->num_assigned);
10482 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10483 dwarf_attr_name (a->dw_attr),
10484 sym);
10485 }
10486 else
10487 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10488 "%s", dwarf_attr_name (a->dw_attr));
10489 }
10490
10491 /* Output the offset of the view list into the debug_loc section. */
10492
10493 static void
10494 output_view_list_offset (dw_attr_node *a)
10495 {
10496 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10497
10498 gcc_assert (sym);
10499 if (dwarf_split_debug_info)
10500 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10501 "%s", dwarf_attr_name (a->dw_attr));
10502 else
10503 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10504 "%s", dwarf_attr_name (a->dw_attr));
10505 }
10506
10507 /* Output an attribute's index or value appropriately. */
10508
10509 static void
10510 output_attr_index_or_value (dw_attr_node *a)
10511 {
10512 const char *name = dwarf_attr_name (a->dw_attr);
10513
10514 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10515 {
10516 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10517 return;
10518 }
10519 switch (AT_class (a))
10520 {
10521 case dw_val_class_addr:
10522 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10523 break;
10524 case dw_val_class_high_pc:
10525 case dw_val_class_lbl_id:
10526 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10527 break;
10528 default:
10529 gcc_unreachable ();
10530 }
10531 }
10532
10533 /* Output a type signature. */
10534
10535 static inline void
10536 output_signature (const char *sig, const char *name)
10537 {
10538 int i;
10539
10540 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10541 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10542 }
10543
10544 /* Output a discriminant value. */
10545
10546 static inline void
10547 output_discr_value (dw_discr_value *discr_value, const char *name)
10548 {
10549 if (discr_value->pos)
10550 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10551 else
10552 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10553 }
10554
10555 /* Output the DIE and its attributes. Called recursively to generate
10556 the definitions of each child DIE. */
10557
10558 static void
10559 output_die (dw_die_ref die)
10560 {
10561 dw_attr_node *a;
10562 dw_die_ref c;
10563 unsigned long size;
10564 unsigned ix;
10565
10566 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10567 (unsigned long)die->die_offset,
10568 dwarf_tag_name (die->die_tag));
10569
10570 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10571 {
10572 const char *name = dwarf_attr_name (a->dw_attr);
10573
10574 switch (AT_class (a))
10575 {
10576 case dw_val_class_addr:
10577 output_attr_index_or_value (a);
10578 break;
10579
10580 case dw_val_class_offset:
10581 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10582 "%s", name);
10583 break;
10584
10585 case dw_val_class_range_list:
10586 output_range_list_offset (a);
10587 break;
10588
10589 case dw_val_class_loc:
10590 size = size_of_locs (AT_loc (a));
10591
10592 /* Output the block length for this list of location operations. */
10593 if (dwarf_version >= 4)
10594 dw2_asm_output_data_uleb128 (size, "%s", name);
10595 else
10596 dw2_asm_output_data (constant_size (size), size, "%s", name);
10597
10598 output_loc_sequence (AT_loc (a), -1);
10599 break;
10600
10601 case dw_val_class_const:
10602 /* ??? It would be slightly more efficient to use a scheme like the one
10603 used for unsigned constants below, but gdb 4.x does not sign
10604 extend. Gdb 5.x does sign extend. */
10605 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10606 break;
10607
10608 case dw_val_class_unsigned_const:
10609 {
10610 int csize = constant_size (AT_unsigned (a));
10611 if (dwarf_version == 3
10612 && a->dw_attr == DW_AT_data_member_location
10613 && csize >= 4)
10614 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10615 else
10616 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10617 }
10618 break;
10619
10620 case dw_val_class_symview:
10621 {
10622 int vsize;
10623 if (symview_upper_bound <= 0xff)
10624 vsize = 1;
10625 else if (symview_upper_bound <= 0xffff)
10626 vsize = 2;
10627 else if (symview_upper_bound <= 0xffffffff)
10628 vsize = 4;
10629 else
10630 vsize = 8;
10631 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10632 "%s", name);
10633 }
10634 break;
10635
10636 case dw_val_class_const_implicit:
10637 if (flag_debug_asm)
10638 fprintf (asm_out_file, "\t\t\t%s %s ("
10639 HOST_WIDE_INT_PRINT_DEC ")\n",
10640 ASM_COMMENT_START, name, AT_int (a));
10641 break;
10642
10643 case dw_val_class_unsigned_const_implicit:
10644 if (flag_debug_asm)
10645 fprintf (asm_out_file, "\t\t\t%s %s ("
10646 HOST_WIDE_INT_PRINT_HEX ")\n",
10647 ASM_COMMENT_START, name, AT_unsigned (a));
10648 break;
10649
10650 case dw_val_class_const_double:
10651 {
10652 unsigned HOST_WIDE_INT first, second;
10653
10654 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10655 dw2_asm_output_data (1,
10656 HOST_BITS_PER_DOUBLE_INT
10657 / HOST_BITS_PER_CHAR,
10658 NULL);
10659
10660 if (WORDS_BIG_ENDIAN)
10661 {
10662 first = a->dw_attr_val.v.val_double.high;
10663 second = a->dw_attr_val.v.val_double.low;
10664 }
10665 else
10666 {
10667 first = a->dw_attr_val.v.val_double.low;
10668 second = a->dw_attr_val.v.val_double.high;
10669 }
10670
10671 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10672 first, "%s", name);
10673 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10674 second, NULL);
10675 }
10676 break;
10677
10678 case dw_val_class_wide_int:
10679 {
10680 int i;
10681 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10682 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10683 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10684 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10685 * l, NULL);
10686
10687 if (WORDS_BIG_ENDIAN)
10688 for (i = len - 1; i >= 0; --i)
10689 {
10690 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10691 "%s", name);
10692 name = "";
10693 }
10694 else
10695 for (i = 0; i < len; ++i)
10696 {
10697 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10698 "%s", name);
10699 name = "";
10700 }
10701 }
10702 break;
10703
10704 case dw_val_class_vec:
10705 {
10706 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10707 unsigned int len = a->dw_attr_val.v.val_vec.length;
10708 unsigned int i;
10709 unsigned char *p;
10710
10711 dw2_asm_output_data (constant_size (len * elt_size),
10712 len * elt_size, "%s", name);
10713 if (elt_size > sizeof (HOST_WIDE_INT))
10714 {
10715 elt_size /= 2;
10716 len *= 2;
10717 }
10718 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10719 i < len;
10720 i++, p += elt_size)
10721 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10722 "fp or vector constant word %u", i);
10723 break;
10724 }
10725
10726 case dw_val_class_flag:
10727 if (dwarf_version >= 4)
10728 {
10729 /* Currently all add_AT_flag calls pass in 1 as last argument,
10730 so DW_FORM_flag_present can be used. If that ever changes,
10731 we'll need to use DW_FORM_flag and have some optimization
10732 in build_abbrev_table that will change those to
10733 DW_FORM_flag_present if it is set to 1 in all DIEs using
10734 the same abbrev entry. */
10735 gcc_assert (AT_flag (a) == 1);
10736 if (flag_debug_asm)
10737 fprintf (asm_out_file, "\t\t\t%s %s\n",
10738 ASM_COMMENT_START, name);
10739 break;
10740 }
10741 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10742 break;
10743
10744 case dw_val_class_loc_list:
10745 output_loc_list_offset (a);
10746 break;
10747
10748 case dw_val_class_view_list:
10749 output_view_list_offset (a);
10750 break;
10751
10752 case dw_val_class_die_ref:
10753 if (AT_ref_external (a))
10754 {
10755 if (AT_ref (a)->comdat_type_p)
10756 {
10757 comdat_type_node *type_node
10758 = AT_ref (a)->die_id.die_type_node;
10759
10760 gcc_assert (type_node);
10761 output_signature (type_node->signature, name);
10762 }
10763 else
10764 {
10765 const char *sym = AT_ref (a)->die_id.die_symbol;
10766 int size;
10767
10768 gcc_assert (sym);
10769 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10770 length, whereas in DWARF3 it's always sized as an
10771 offset. */
10772 if (dwarf_version == 2)
10773 size = DWARF2_ADDR_SIZE;
10774 else
10775 size = DWARF_OFFSET_SIZE;
10776 /* ??? We cannot unconditionally output die_offset if
10777 non-zero - others might create references to those
10778 DIEs via symbols.
10779 And we do not clear its DIE offset after outputting it
10780 (and the label refers to the actual DIEs, not the
10781 DWARF CU header, for which label + offset
10782 would be the correct thing to do).
10783 ??? This is the reason for the with_offset flag. */
10784 if (AT_ref (a)->with_offset)
10785 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10786 debug_info_section, "%s", name);
10787 else
10788 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10789 name);
10790 }
10791 }
10792 else
10793 {
10794 gcc_assert (AT_ref (a)->die_offset);
10795 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10796 "%s", name);
10797 }
10798 break;
10799
10800 case dw_val_class_fde_ref:
10801 {
10802 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10803
10804 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10805 a->dw_attr_val.v.val_fde_index * 2);
10806 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10807 "%s", name);
10808 }
10809 break;
10810
10811 case dw_val_class_vms_delta:
10812 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10813 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10814 AT_vms_delta2 (a), AT_vms_delta1 (a),
10815 "%s", name);
10816 #else
10817 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10818 AT_vms_delta2 (a), AT_vms_delta1 (a),
10819 "%s", name);
10820 #endif
10821 break;
10822
10823 case dw_val_class_lbl_id:
10824 output_attr_index_or_value (a);
10825 break;
10826
10827 case dw_val_class_lineptr:
10828 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10829 debug_line_section, "%s", name);
10830 break;
10831
10832 case dw_val_class_macptr:
10833 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10834 debug_macinfo_section, "%s", name);
10835 break;
10836
10837 case dw_val_class_loclistsptr:
10838 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10839 debug_loc_section, "%s", name);
10840 break;
10841
10842 case dw_val_class_str:
10843 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10844 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10845 a->dw_attr_val.v.val_str->label,
10846 debug_str_section,
10847 "%s: \"%s\"", name, AT_string (a));
10848 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10849 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10850 a->dw_attr_val.v.val_str->label,
10851 debug_line_str_section,
10852 "%s: \"%s\"", name, AT_string (a));
10853 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10854 dw2_asm_output_data_uleb128 (AT_index (a),
10855 "%s: \"%s\"", name, AT_string (a));
10856 else
10857 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10858 break;
10859
10860 case dw_val_class_file:
10861 {
10862 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10863
10864 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10865 a->dw_attr_val.v.val_file->filename);
10866 break;
10867 }
10868
10869 case dw_val_class_file_implicit:
10870 if (flag_debug_asm)
10871 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10872 ASM_COMMENT_START, name,
10873 maybe_emit_file (a->dw_attr_val.v.val_file),
10874 a->dw_attr_val.v.val_file->filename);
10875 break;
10876
10877 case dw_val_class_data8:
10878 {
10879 int i;
10880
10881 for (i = 0; i < 8; i++)
10882 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10883 i == 0 ? "%s" : NULL, name);
10884 break;
10885 }
10886
10887 case dw_val_class_high_pc:
10888 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10889 get_AT_low_pc (die), "DW_AT_high_pc");
10890 break;
10891
10892 case dw_val_class_discr_value:
10893 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10894 break;
10895
10896 case dw_val_class_discr_list:
10897 {
10898 dw_discr_list_ref list = AT_discr_list (a);
10899 const int size = size_of_discr_list (list);
10900
10901 /* This is a block, so output its length first. */
10902 dw2_asm_output_data (constant_size (size), size,
10903 "%s: block size", name);
10904
10905 for (; list != NULL; list = list->dw_discr_next)
10906 {
10907 /* One byte for the discriminant value descriptor, and then as
10908 many LEB128 numbers as required. */
10909 if (list->dw_discr_range)
10910 dw2_asm_output_data (1, DW_DSC_range,
10911 "%s: DW_DSC_range", name);
10912 else
10913 dw2_asm_output_data (1, DW_DSC_label,
10914 "%s: DW_DSC_label", name);
10915
10916 output_discr_value (&list->dw_discr_lower_bound, name);
10917 if (list->dw_discr_range)
10918 output_discr_value (&list->dw_discr_upper_bound, name);
10919 }
10920 break;
10921 }
10922
10923 default:
10924 gcc_unreachable ();
10925 }
10926 }
10927
10928 FOR_EACH_CHILD (die, c, output_die (c));
10929
10930 /* Add null byte to terminate sibling list. */
10931 if (die->die_child != NULL)
10932 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10933 (unsigned long) die->die_offset);
10934 }
10935
10936 /* Output the dwarf version number. */
10937
10938 static void
10939 output_dwarf_version ()
10940 {
10941 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10942 views in loclist. That will change eventually. */
10943 if (dwarf_version == 6)
10944 {
10945 static bool once;
10946 if (!once)
10947 {
10948 warning (0,
10949 "-gdwarf-6 is output as version 5 with incompatibilities");
10950 once = true;
10951 }
10952 dw2_asm_output_data (2, 5, "DWARF version number");
10953 }
10954 else
10955 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10956 }
10957
10958 /* Output the compilation unit that appears at the beginning of the
10959 .debug_info section, and precedes the DIE descriptions. */
10960
10961 static void
10962 output_compilation_unit_header (enum dwarf_unit_type ut)
10963 {
10964 if (!XCOFF_DEBUGGING_INFO)
10965 {
10966 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10967 dw2_asm_output_data (4, 0xffffffff,
10968 "Initial length escape value indicating 64-bit DWARF extension");
10969 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10970 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10971 "Length of Compilation Unit Info");
10972 }
10973
10974 output_dwarf_version ();
10975 if (dwarf_version >= 5)
10976 {
10977 const char *name;
10978 switch (ut)
10979 {
10980 case DW_UT_compile: name = "DW_UT_compile"; break;
10981 case DW_UT_type: name = "DW_UT_type"; break;
10982 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10983 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10984 default: gcc_unreachable ();
10985 }
10986 dw2_asm_output_data (1, ut, "%s", name);
10987 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10988 }
10989 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10990 debug_abbrev_section,
10991 "Offset Into Abbrev. Section");
10992 if (dwarf_version < 5)
10993 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10994 }
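
/* Illustrative sketch, not from the GCC sources: how the 32-bit-DWARF
   compile-unit header emitted above lays out its bytes for DWARF 2-4
   versus DWARF 5.  encode_cu_header is a hypothetical stand-alone
   helper and assumes DWARF_OFFSET_SIZE == 4 and a little-endian host.  */
#if 0
#include <stdint.h>
#include <string.h>

static size_t
encode_cu_header (unsigned char *buf, uint32_t unit_length,
                  uint16_t version, uint8_t unit_type,
                  uint8_t address_size, uint32_t abbrev_offset)
{
  unsigned char *p = buf;
  memcpy (p, &unit_length, 4); p += 4;   /* Initial length.  */
  memcpy (p, &version, 2); p += 2;       /* DWARF version.  */
  if (version >= 5)
    {
      *p++ = unit_type;                  /* e.g. DW_UT_compile == 0x01.  */
      *p++ = address_size;               /* Pointer size in bytes.  */
      memcpy (p, &abbrev_offset, 4); p += 4;
    }
  else
    {
      memcpy (p, &abbrev_offset, 4); p += 4;
      *p++ = address_size;               /* Pointer size comes last pre-v5.  */
    }
  return p - buf;                        /* 11 bytes for v2-v4, 12 for v5.  */
}
#endif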
10995
10996 /* Output the compilation unit DIE and its children. */
10997
10998 static void
10999 output_comp_unit (dw_die_ref die, int output_if_empty,
11000 const unsigned char *dwo_id)
11001 {
11002 const char *secname, *oldsym;
11003 char *tmp;
11004
11005 /* Unless we are outputting the main CU, we may throw away empty ones. */
11006 if (!output_if_empty && die->die_child == NULL)
11007 return;
11008
11009 /* Even if there are no children of this DIE, we must output the information
11010 about the compilation unit. Otherwise, on an empty translation unit, we
11011 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11012 will then complain when examining the file. First mark all the DIEs in
11013 this CU so we know which get local refs. */
11014 mark_dies (die);
11015
11016 external_ref_hash_type *extern_map = optimize_external_refs (die);
11017
11018 /* For now, optimize only the main CU; to optimize the rest
11019 we'd need to see all of them earlier. Leave the rest for post-linking
11020 tools like DWZ. */
11021 if (die == comp_unit_die ())
11022 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11023
11024 build_abbrev_table (die, extern_map);
11025
11026 optimize_abbrev_table ();
11027
11028 delete extern_map;
11029
11030 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11031 next_die_offset = (dwo_id
11032 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11033 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11034 calc_die_sizes (die);
11035
11036 oldsym = die->die_id.die_symbol;
11037 if (oldsym && die->comdat_type_p)
11038 {
11039 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11040
11041 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11042 secname = tmp;
11043 die->die_id.die_symbol = NULL;
11044 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11045 }
11046 else
11047 {
11048 switch_to_section (debug_info_section);
11049 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11050 info_section_emitted = true;
11051 }
11052
11053 /* For LTO cross-unit DIE refs we want a symbol at the start of the
11054 debuginfo section, not on the CU DIE. */
11055 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11056 {
11057 /* ??? No way to get visibility assembled without a decl. */
11058 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11059 get_identifier (oldsym), char_type_node);
11060 TREE_PUBLIC (decl) = true;
11061 TREE_STATIC (decl) = true;
11062 DECL_ARTIFICIAL (decl) = true;
11063 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11064 DECL_VISIBILITY_SPECIFIED (decl) = true;
11065 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11066 #ifdef ASM_WEAKEN_LABEL
11067 /* We prefer a .weak because that handles duplicates from duplicate
11068 archive members in a graceful way. */
11069 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11070 #else
11071 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11072 #endif
11073 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11074 }
11075
11076 /* Output debugging information. */
11077 output_compilation_unit_header (dwo_id
11078 ? DW_UT_split_compile : DW_UT_compile);
11079 if (dwarf_version >= 5)
11080 {
11081 if (dwo_id != NULL)
11082 for (int i = 0; i < 8; i++)
11083 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11084 }
11085 output_die (die);
11086
11087 /* Leave the marks on the main CU, so we can check them in
11088 output_pubnames. */
11089 if (oldsym)
11090 {
11091 unmark_dies (die);
11092 die->die_id.die_symbol = oldsym;
11093 }
11094 }
11095
11096 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11097 and .debug_pubtypes. This is configured per-target, but can be
11098 overridden by the -gpubnames or -gno-pubnames options. */
11099
11100 static inline bool
11101 want_pubnames (void)
11102 {
11103 if (debug_info_level <= DINFO_LEVEL_TERSE)
11104 return false;
11105 if (debug_generate_pub_sections != -1)
11106 return debug_generate_pub_sections;
11107 return targetm.want_debug_pub_sections;
11108 }
11109
11110 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11111
11112 static void
11113 add_AT_pubnames (dw_die_ref die)
11114 {
11115 if (want_pubnames ())
11116 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11117 }
11118
11119 /* Add a string attribute value to a skeleton DIE. */
11120
11121 static inline void
11122 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11123 const char *str)
11124 {
11125 dw_attr_node attr;
11126 struct indirect_string_node *node;
11127
11128 if (! skeleton_debug_str_hash)
11129 skeleton_debug_str_hash
11130 = hash_table<indirect_string_hasher>::create_ggc (10);
11131
11132 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11133 find_string_form (node);
11134 if (node->form == dwarf_FORM (DW_FORM_strx))
11135 node->form = DW_FORM_strp;
11136
11137 attr.dw_attr = attr_kind;
11138 attr.dw_attr_val.val_class = dw_val_class_str;
11139 attr.dw_attr_val.val_entry = NULL;
11140 attr.dw_attr_val.v.val_str = node;
11141 add_dwarf_attr (die, &attr);
11142 }
11143
11144 /* Helper function to generate top-level dies for skeleton debug_info and
11145 debug_types. */
11146
11147 static void
11148 add_top_level_skeleton_die_attrs (dw_die_ref die)
11149 {
11150 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11151 const char *comp_dir = comp_dir_string ();
11152
11153 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11154 if (comp_dir != NULL)
11155 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11156 add_AT_pubnames (die);
11157 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11158 }
11159
11160 /* Output skeleton debug sections that point to the dwo file. */
11161
11162 static void
11163 output_skeleton_debug_sections (dw_die_ref comp_unit,
11164 const unsigned char *dwo_id)
11165 {
11166 /* These attributes will be found in the full debug_info section. */
11167 remove_AT (comp_unit, DW_AT_producer);
11168 remove_AT (comp_unit, DW_AT_language);
11169
11170 switch_to_section (debug_skeleton_info_section);
11171 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11172
11173 /* Produce the skeleton compilation-unit header. This one differs enough from
11174 a normal CU header that it's better not to call
11175 output_compilation_unit_header. */
11176 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11177 dw2_asm_output_data (4, 0xffffffff,
11178 "Initial length escape value indicating 64-bit "
11179 "DWARF extension");
11180
11181 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11182 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11183 - DWARF_INITIAL_LENGTH_SIZE
11184 + size_of_die (comp_unit),
11185 "Length of Compilation Unit Info");
11186 output_dwarf_version ();
11187 if (dwarf_version >= 5)
11188 {
11189 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11190 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11191 }
11192 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11193 debug_skeleton_abbrev_section,
11194 "Offset Into Abbrev. Section");
11195 if (dwarf_version < 5)
11196 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11197 else
11198 for (int i = 0; i < 8; i++)
11199 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11200
11201 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11202 output_die (comp_unit);
11203
11204 /* Build the skeleton debug_abbrev section. */
11205 switch_to_section (debug_skeleton_abbrev_section);
11206 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11207
11208 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11209
11210 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11211 }
11212
11213 /* Output a comdat type unit DIE and its children. */
11214
11215 static void
11216 output_comdat_type_unit (comdat_type_node *node)
11217 {
11218 const char *secname;
11219 char *tmp;
11220 int i;
11221 #if defined (OBJECT_FORMAT_ELF)
11222 tree comdat_key;
11223 #endif
11224
11225 /* First mark all the DIEs in this CU so we know which get local refs. */
11226 mark_dies (node->root_die);
11227
11228 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11229
11230 build_abbrev_table (node->root_die, extern_map);
11231
11232 delete extern_map;
11233 extern_map = NULL;
11234
11235 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11236 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11237 calc_die_sizes (node->root_die);
11238
11239 #if defined (OBJECT_FORMAT_ELF)
11240 if (dwarf_version >= 5)
11241 {
11242 if (!dwarf_split_debug_info)
11243 secname = ".debug_info";
11244 else
11245 secname = ".debug_info.dwo";
11246 }
11247 else if (!dwarf_split_debug_info)
11248 secname = ".debug_types";
11249 else
11250 secname = ".debug_types.dwo";
11251
11252 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11253 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11254 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11255 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11256 comdat_key = get_identifier (tmp);
11257 targetm.asm_out.named_section (secname,
11258 SECTION_DEBUG | SECTION_LINKONCE,
11259 comdat_key);
11260 #else
11261 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11262 sprintf (tmp, (dwarf_version >= 5
11263 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11264 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11265 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11266 secname = tmp;
11267 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11268 #endif
11269
11270 /* Output debugging information. */
11271 output_compilation_unit_header (dwarf_split_debug_info
11272 ? DW_UT_split_type : DW_UT_type);
11273 output_signature (node->signature, "Type Signature");
11274 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11275 "Offset to Type DIE");
11276 output_die (node->root_die);
11277
11278 unmark_dies (node->root_die);
11279 }
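
/* Illustrative sketch, not from the GCC sources: how the comdat group
   key used above is built from the 8-byte type signature -- "wi."
   (DWARF 5, .debug_info) or "wt." (.debug_types) followed by the
   signature in hex.  format_comdat_key is a hypothetical helper.  */
#if 0
#include <stdio.h>

static void
format_comdat_key (char *buf, const unsigned char sig[8], int dwarf5)
{
  int i;

  /* BUF must hold 3 + 16 + 1 = 20 bytes, e.g. "wi.1122334455667788".  */
  sprintf (buf, dwarf5 ? "wi." : "wt.");
  for (i = 0; i < 8; i++)
    sprintf (buf + 3 + i * 2, "%02x", sig[i] & 0xff);
}
#endif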
11280
11281 /* Return the DWARF2/3 pubname associated with a decl. */
11282
11283 static const char *
11284 dwarf2_name (tree decl, int scope)
11285 {
11286 if (DECL_NAMELESS (decl))
11287 return NULL;
11288 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11289 }
11290
11291 /* Add a new entry to .debug_pubnames if appropriate. */
11292
11293 static void
11294 add_pubname_string (const char *str, dw_die_ref die)
11295 {
11296 pubname_entry e;
11297
11298 e.die = die;
11299 e.name = xstrdup (str);
11300 vec_safe_push (pubname_table, e);
11301 }
11302
11303 static void
11304 add_pubname (tree decl, dw_die_ref die)
11305 {
11306 if (!want_pubnames ())
11307 return;
11308
11309 /* Don't add items to the table when we expect that the consumer will have
11310 just read the enclosing die. For example, if the consumer is looking at a
11311 class_member, it will either be inside the class already, or will have just
11312 looked up the class to find the member. Either way, searching the class is
11313 faster than searching the index. */
11314 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11315 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11316 {
11317 const char *name = dwarf2_name (decl, 1);
11318
11319 if (name)
11320 add_pubname_string (name, die);
11321 }
11322 }
11323
11324 /* Add an enumerator to the pubnames section. */
11325
11326 static void
11327 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11328 {
11329 pubname_entry e;
11330
11331 gcc_assert (scope_name);
11332 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11333 e.die = die;
11334 vec_safe_push (pubname_table, e);
11335 }
11336
11337 /* Add a new entry to .debug_pubtypes if appropriate. */
11338
11339 static void
11340 add_pubtype (tree decl, dw_die_ref die)
11341 {
11342 pubname_entry e;
11343
11344 if (!want_pubnames ())
11345 return;
11346
11347 if ((TREE_PUBLIC (decl)
11348 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11349 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11350 {
11351 tree scope = NULL;
11352 const char *scope_name = "";
11353 const char *sep = is_cxx () ? "::" : ".";
11354 const char *name;
11355
11356 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11357 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11358 {
11359 scope_name = lang_hooks.dwarf_name (scope, 1);
11360 if (scope_name != NULL && scope_name[0] != '\0')
11361 scope_name = concat (scope_name, sep, NULL);
11362 else
11363 scope_name = "";
11364 }
11365
11366 if (TYPE_P (decl))
11367 name = type_tag (decl);
11368 else
11369 name = lang_hooks.dwarf_name (decl, 1);
11370
11371 /* If we don't have a name for the type, there's no point in adding
11372 it to the table. */
11373 if (name != NULL && name[0] != '\0')
11374 {
11375 e.die = die;
11376 e.name = concat (scope_name, name, NULL);
11377 vec_safe_push (pubtype_table, e);
11378 }
11379
11380 /* Although it might be more consistent to add the pubinfo for the
11381 enumerators as their dies are created, they should only be added if the
11382 enum type meets the criteria above. So rather than re-check the parent
11383 enum type whenever an enumerator die is created, just output them all
11384 here. This isn't protected by the name conditional because anonymous
11385 enums don't have names. */
11386 if (die->die_tag == DW_TAG_enumeration_type)
11387 {
11388 dw_die_ref c;
11389
11390 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11391 }
11392 }
11393 }
11394
11395 /* Output a single entry in the pubnames table. */
11396
11397 static void
11398 output_pubname (dw_offset die_offset, pubname_entry *entry)
11399 {
11400 dw_die_ref die = entry->die;
11401 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11402
11403 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11404
11405 if (debug_generate_pub_sections == 2)
11406 {
11407 /* This logic follows gdb's method for determining the value of the flag
11408 byte. */
11409 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11410 switch (die->die_tag)
11411 {
11412 case DW_TAG_typedef:
11413 case DW_TAG_base_type:
11414 case DW_TAG_subrange_type:
11415 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11416 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11417 break;
11418 case DW_TAG_enumerator:
11419 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11420 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11421 if (!is_cxx ())
11422 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11423 break;
11424 case DW_TAG_subprogram:
11425 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11426 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11427 if (!is_ada ())
11428 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11429 break;
11430 case DW_TAG_constant:
11431 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11432 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11433 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11434 break;
11435 case DW_TAG_variable:
11436 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11437 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11438 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11439 break;
11440 case DW_TAG_namespace:
11441 case DW_TAG_imported_declaration:
11442 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11443 break;
11444 case DW_TAG_class_type:
11445 case DW_TAG_interface_type:
11446 case DW_TAG_structure_type:
11447 case DW_TAG_union_type:
11448 case DW_TAG_enumeration_type:
11449 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11450 if (!is_cxx ())
11451 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11452 break;
11453 default:
11454 /* An unusual tag. Leave the flag-byte empty. */
11455 break;
11456 }
11457 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11458 "GDB-index flags");
11459 }
11460
11461 dw2_asm_output_nstring (entry->name, -1, "external name");
11462 }
11463
11464
11465 /* Output the public names table used to speed up access to externally
11466 visible names; or the public types table used to find type definitions. */
11467
11468 static void
11469 output_pubnames (vec<pubname_entry, va_gc> *names)
11470 {
11471 unsigned i;
11472 unsigned long pubnames_length = size_of_pubnames (names);
11473 pubname_entry *pub;
11474
11475 if (!XCOFF_DEBUGGING_INFO)
11476 {
11477 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11478 dw2_asm_output_data (4, 0xffffffff,
11479 "Initial length escape value indicating 64-bit DWARF extension");
11480 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11481 "Pub Info Length");
11482 }
11483
11484 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11485 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11486
11487 if (dwarf_split_debug_info)
11488 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11489 debug_skeleton_info_section,
11490 "Offset of Compilation Unit Info");
11491 else
11492 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11493 debug_info_section,
11494 "Offset of Compilation Unit Info");
11495 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11496 "Compilation Unit Length");
11497
11498 FOR_EACH_VEC_ELT (*names, i, pub)
11499 {
11500 if (include_pubname_in_output (names, pub))
11501 {
11502 dw_offset die_offset = pub->die->die_offset;
11503
11504 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11505 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11506 gcc_assert (pub->die->die_mark);
11507
11508 /* If we're putting types in their own .debug_types sections,
11509 the .debug_pubtypes table will still point to the compile
11510 unit (not the type unit), so we want to use the offset of
11511 the skeleton DIE (if there is one). */
11512 if (pub->die->comdat_type_p && names == pubtype_table)
11513 {
11514 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11515
11516 if (type_node != NULL)
11517 die_offset = (type_node->skeleton_die != NULL
11518 ? type_node->skeleton_die->die_offset
11519 : comp_unit_die ()->die_offset);
11520 }
11521
11522 output_pubname (die_offset, pub);
11523 }
11524 }
11525
11526 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11527 }
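
/* Illustrative sketch, not from the GCC sources: the overall shape of
   one 32-bit-DWARF .debug_pubnames / .debug_pubtypes set as emitted
   above.  A header is followed by (DIE offset, name) pairs and a
   terminating zero offset; with debug_generate_pub_sections == 2 a
   one-byte GDB-index flag is emitted between offset and name.  The
   struct names below are hypothetical.  */
#if 0
#include <stdint.h>

struct pub_set_header
{
  uint32_t unit_length;  /* Size of the set, excluding this field.  */
  uint16_t version;      /* Always 2, independent of the CU's version.  */
  uint32_t cu_offset;    /* Offset of the CU header in .debug_info.  */
  uint32_t cu_length;    /* Size of that compilation unit.  */
};

struct pub_pair
{
  uint32_t die_offset;   /* CU-relative DIE offset; 0 ends the set.  */
  const char *name;      /* NUL-terminated name; absent on the terminator.  */
};
#endif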
11528
11529 /* Output public names and types tables if necessary. */
11530
11531 static void
11532 output_pubtables (void)
11533 {
11534 if (!want_pubnames () || !info_section_emitted)
11535 return;
11536
11537 switch_to_section (debug_pubnames_section);
11538 output_pubnames (pubname_table);
11539 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11540 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11541 simply won't look for the section. */
11542 switch_to_section (debug_pubtypes_section);
11543 output_pubnames (pubtype_table);
11544 }
11545
11546
11547 /* Output the information that goes into the .debug_aranges table.
11548 Namely, define the beginning and ending address range of the
11549 text section generated for this compilation unit. */
11550
11551 static void
11552 output_aranges (void)
11553 {
11554 unsigned i;
11555 unsigned long aranges_length = size_of_aranges ();
11556
11557 if (!XCOFF_DEBUGGING_INFO)
11558 {
11559 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11560 dw2_asm_output_data (4, 0xffffffff,
11561 "Initial length escape value indicating 64-bit DWARF extension");
11562 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11563 "Length of Address Ranges Info");
11564 }
11565
11566 /* Version number for aranges is still 2, even up to DWARF5. */
11567 dw2_asm_output_data (2, 2, "DWARF aranges version");
11568 if (dwarf_split_debug_info)
11569 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11570 debug_skeleton_info_section,
11571 "Offset of Compilation Unit Info");
11572 else
11573 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11574 debug_info_section,
11575 "Offset of Compilation Unit Info");
11576 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11577 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11578
11579 /* We need to align to twice the pointer size here. */
11580 if (DWARF_ARANGES_PAD_SIZE)
11581 {
11582 /* Pad using 2-byte words so that the padding is correct for any
11583 pointer size. */
11584 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11585 2 * DWARF2_ADDR_SIZE);
11586 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11587 dw2_asm_output_data (2, 0, NULL);
11588 }
11589
11590 /* It is necessary not to output these entries if the sections were
11591 not used; in that case the length will be 0 and
11592 the address may end up as 0 if the section is discarded by ld
11593 --gc-sections, leaving an invalid (0, 0) entry that can be
11594 confused with the terminator. */
11595 if (text_section_used)
11596 {
11597 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11598 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11599 text_section_label, "Length");
11600 }
11601 if (cold_text_section_used)
11602 {
11603 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11604 "Address");
11605 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11606 cold_text_section_label, "Length");
11607 }
11608
11609 if (have_multiple_function_sections)
11610 {
11611 unsigned fde_idx;
11612 dw_fde_ref fde;
11613
11614 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11615 {
11616 if (DECL_IGNORED_P (fde->decl))
11617 continue;
11618 if (!fde->in_std_section)
11619 {
11620 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11621 "Address");
11622 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11623 fde->dw_fde_begin, "Length");
11624 }
11625 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11626 {
11627 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11628 "Address");
11629 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11630 fde->dw_fde_second_begin, "Length");
11631 }
11632 }
11633 }
11634
11635 /* Output the terminator words. */
11636 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11637 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11638 }
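
/* Illustrative sketch, not from the GCC sources: the padding rule used
   above.  The (address, length) tuples must start on a boundary of
   twice the address size, so the 32-bit-DWARF header (4 + 2 + 4 + 1 + 1
   = 12 bytes) is padded up to that alignment.  aranges_pad_bytes is a
   hypothetical helper.  */
#if 0
static unsigned int
aranges_pad_bytes (unsigned int header_size, unsigned int address_size)
{
  unsigned int align = 2 * address_size;
  return (align - header_size % align) % align;
}

/* Example: aranges_pad_bytes (12, 8) == 4 and aranges_pad_bytes (12, 4)
   == 4, matching the 2-byte pad words emitted above.  */
#endif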
11639
11640 /* Add a new entry to .debug_ranges. Return its index into
11641 ranges_table vector. */
11642
11643 static unsigned int
11644 add_ranges_num (int num, bool maybe_new_sec)
11645 {
11646 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11647 vec_safe_push (ranges_table, r);
11648 return vec_safe_length (ranges_table) - 1;
11649 }
11650
11651 /* Add a new entry to .debug_ranges corresponding to a block, or a
11652 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11653 this entry might be in a different section from the previous range. */
11654
11655 static unsigned int
11656 add_ranges (const_tree block, bool maybe_new_sec)
11657 {
11658 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11659 }
11660
11661 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11662 chain, or a middle entry of a chain that will be directly referred to. */
11663
11664 static void
11665 note_rnglist_head (unsigned int offset)
11666 {
11667 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11668 return;
11669 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11670 }
11671
11672 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11673 When using dwarf_split_debug_info, address attributes in dies destined
11674 for the final executable should be direct references--setting the
11675 parameter force_direct ensures this behavior. */
11676
11677 static void
11678 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11679 bool *added, bool force_direct)
11680 {
11681 unsigned int in_use = vec_safe_length (ranges_by_label);
11682 unsigned int offset;
11683 dw_ranges_by_label rbl = { begin, end };
11684 vec_safe_push (ranges_by_label, rbl);
11685 offset = add_ranges_num (-(int)in_use - 1, true);
11686 if (!*added)
11687 {
11688 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11689 *added = true;
11690 note_rnglist_head (offset);
11691 }
11692 }
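
/* Illustrative sketch, not from the GCC sources: the packing used by
   add_ranges_by_labels above and decoded later in output_ranges and
   output_rnglists.  Label-pair entries are stored as -(index) - 1, so
   0 stays free for list terminators and positive values for block
   numbers.  Both helpers are hypothetical.  */
#if 0
static int
encode_label_index (unsigned int label_index)
{
  return -(int) label_index - 1;             /* 0 -> -1, 1 -> -2, ...  */
}

static unsigned int
decode_label_index (int block_num)
{
  return (unsigned int) (-block_num - 1);    /* -1 -> 0, -2 -> 1, ...  */
}
#endif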
11693
11694 /* Emit .debug_ranges section. */
11695
11696 static void
11697 output_ranges (void)
11698 {
11699 unsigned i;
11700 static const char *const start_fmt = "Offset %#x";
11701 const char *fmt = start_fmt;
11702 dw_ranges *r;
11703
11704 switch_to_section (debug_ranges_section);
11705 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11706 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11707 {
11708 int block_num = r->num;
11709
11710 if (block_num > 0)
11711 {
11712 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11713 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11714
11715 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11716 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11717
11718 /* If all code is in the text section, then the compilation
11719 unit base address defaults to DW_AT_low_pc, which is the
11720 base of the text section. */
11721 if (!have_multiple_function_sections)
11722 {
11723 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11724 text_section_label,
11725 fmt, i * 2 * DWARF2_ADDR_SIZE);
11726 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11727 text_section_label, NULL);
11728 }
11729
11730 /* Otherwise, the compilation unit base address is zero,
11731 which allows us to use absolute addresses, and not worry
11732 about whether the target supports cross-section
11733 arithmetic. */
11734 else
11735 {
11736 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11737 fmt, i * 2 * DWARF2_ADDR_SIZE);
11738 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11739 }
11740
11741 fmt = NULL;
11742 }
11743
11744 /* Negative block_num stands for an index into ranges_by_label. */
11745 else if (block_num < 0)
11746 {
11747 int lab_idx = - block_num - 1;
11748
11749 if (!have_multiple_function_sections)
11750 {
11751 gcc_unreachable ();
11752 #if 0
11753 /* If we ever use add_ranges_by_labels () for a single
11754 function section, all we have to do is to take out
11755 the #if 0 above. */
11756 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11757 (*ranges_by_label)[lab_idx].begin,
11758 text_section_label,
11759 fmt, i * 2 * DWARF2_ADDR_SIZE);
11760 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11761 (*ranges_by_label)[lab_idx].end,
11762 text_section_label, NULL);
11763 #endif
11764 }
11765 else
11766 {
11767 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11768 (*ranges_by_label)[lab_idx].begin,
11769 fmt, i * 2 * DWARF2_ADDR_SIZE);
11770 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11771 (*ranges_by_label)[lab_idx].end,
11772 NULL);
11773 }
11774 }
11775 else
11776 {
11777 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11778 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11779 fmt = start_fmt;
11780 }
11781 }
11782 }
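
/* Illustrative sketch, not from the GCC sources: the shape of one
   pre-DWARF-5 .debug_ranges list as emitted above -- pairs of begin/end
   values, relative to the CU base address when all code is in the text
   section and absolute otherwise, ended by a (0, 0) pair.  The struct
   assumes 8-byte addresses and is hypothetical.  */
#if 0
#include <stdint.h>

struct ranges_entry
{
  uint64_t begin;  /* CU-base-relative or absolute, as described above.  */
  uint64_t end;    /* (0, 0) terminates the list; (~0, X) would set a
                      new base address, a form GCC does not emit here.  */
};
#endif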
11783
11784 /* Non-zero if .debug_line_str should be used for .debug_line section
11785 strings or strings that are likely shareable with those. */
11786 #define DWARF5_USE_DEBUG_LINE_STR \
11787 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11788 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11789 /* FIXME: there is no .debug_line_str.dwo section, \
11790 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11791 && !dwarf_split_debug_info)
11792
11793 /* Assign .debug_rnglists indexes. */
11794
11795 static void
11796 index_rnglists (void)
11797 {
11798 unsigned i;
11799 dw_ranges *r;
11800
11801 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11802 if (r->label)
11803 r->idx = rnglist_idx++;
11804 }
11805
11806 /* Emit .debug_rnglists section. */
11807
11808 static void
11809 output_rnglists (unsigned generation)
11810 {
11811 unsigned i;
11812 dw_ranges *r;
11813 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11814 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11815 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11816
11817 switch_to_section (debug_ranges_section);
11818 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11819 /* There are up to 4 unique ranges labels per generation.
11820 See also init_sections_and_labels. */
11821 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11822 2 + generation * 4);
11823 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11824 3 + generation * 4);
11825 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11826 dw2_asm_output_data (4, 0xffffffff,
11827 "Initial length escape value indicating "
11828 "64-bit DWARF extension");
11829 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11830 "Length of Range Lists");
11831 ASM_OUTPUT_LABEL (asm_out_file, l1);
11832 output_dwarf_version ();
11833 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11834 dw2_asm_output_data (1, 0, "Segment Size");
11835 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11836 about relocation sizes and primarily care about the size of .debug*
11837 sections in linked shared libraries and executables, then
11838 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11839 into it are usually larger than just DW_FORM_sec_offset offsets
11840 into the .debug_rnglists section. */
11841 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11842 "Offset Entry Count");
11843 if (dwarf_split_debug_info)
11844 {
11845 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11846 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11847 if (r->label)
11848 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11849 ranges_base_label, NULL);
11850 }
11851
11852 const char *lab = "";
11853 unsigned int len = vec_safe_length (ranges_table);
11854 const char *base = NULL;
11855 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11856 {
11857 int block_num = r->num;
11858
11859 if (r->label)
11860 {
11861 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11862 lab = r->label;
11863 }
11864 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11865 base = NULL;
11866 if (block_num > 0)
11867 {
11868 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11869 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11870
11871 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11872 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11873
11874 if (HAVE_AS_LEB128)
11875 {
11876 /* If all code is in the text section, then the compilation
11877 unit base address defaults to DW_AT_low_pc, which is the
11878 base of the text section. */
11879 if (!have_multiple_function_sections)
11880 {
11881 dw2_asm_output_data (1, DW_RLE_offset_pair,
11882 "DW_RLE_offset_pair (%s)", lab);
11883 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11884 "Range begin address (%s)", lab);
11885 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11886 "Range end address (%s)", lab);
11887 continue;
11888 }
11889 if (base == NULL)
11890 {
11891 dw_ranges *r2 = NULL;
11892 if (i < len - 1)
11893 r2 = &(*ranges_table)[i + 1];
11894 if (r2
11895 && r2->num != 0
11896 && r2->label == NULL
11897 && !r2->maybe_new_sec)
11898 {
11899 dw2_asm_output_data (1, DW_RLE_base_address,
11900 "DW_RLE_base_address (%s)", lab);
11901 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11902 "Base address (%s)", lab);
11903 strcpy (basebuf, blabel);
11904 base = basebuf;
11905 }
11906 }
11907 if (base)
11908 {
11909 dw2_asm_output_data (1, DW_RLE_offset_pair,
11910 "DW_RLE_offset_pair (%s)", lab);
11911 dw2_asm_output_delta_uleb128 (blabel, base,
11912 "Range begin address (%s)", lab);
11913 dw2_asm_output_delta_uleb128 (elabel, base,
11914 "Range end address (%s)", lab);
11915 continue;
11916 }
11917 dw2_asm_output_data (1, DW_RLE_start_length,
11918 "DW_RLE_start_length (%s)", lab);
11919 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11920 "Range begin address (%s)", lab);
11921 dw2_asm_output_delta_uleb128 (elabel, blabel,
11922 "Range length (%s)", lab);
11923 }
11924 else
11925 {
11926 dw2_asm_output_data (1, DW_RLE_start_end,
11927 "DW_RLE_start_end (%s)", lab);
11928 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11929 "Range begin address (%s)", lab);
11930 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11931 "Range end address (%s)", lab);
11932 }
11933 }
11934
11935 /* Negative block_num stands for an index into ranges_by_label. */
11936 else if (block_num < 0)
11937 {
11938 int lab_idx = - block_num - 1;
11939 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11940 const char *elabel = (*ranges_by_label)[lab_idx].end;
11941
11942 if (!have_multiple_function_sections)
11943 gcc_unreachable ();
11944 if (HAVE_AS_LEB128)
11945 {
11946 dw2_asm_output_data (1, DW_RLE_start_length,
11947 "DW_RLE_start_length (%s)", lab);
11948 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11949 "Range begin address (%s)", lab);
11950 dw2_asm_output_delta_uleb128 (elabel, blabel,
11951 "Range length (%s)", lab);
11952 }
11953 else
11954 {
11955 dw2_asm_output_data (1, DW_RLE_start_end,
11956 "DW_RLE_start_end (%s)", lab);
11957 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11958 "Range begin address (%s)", lab);
11959 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11960 "Range end address (%s)", lab);
11961 }
11962 }
11963 else
11964 dw2_asm_output_data (1, DW_RLE_end_of_list,
11965 "DW_RLE_end_of_list (%s)", lab);
11966 }
11967 ASM_OUTPUT_LABEL (asm_out_file, l2);
11968 }
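
/* Illustrative sketch, not from the GCC sources: a simplified version
   of the choice between DWARF 5 range-list entry kinds made above.
   The EX_DW_RLE_* values are the standard DWARF 5 codes; the helper
   and its arguments are hypothetical.  */
#if 0
enum
{
  EX_DW_RLE_end_of_list = 0x00,
  EX_DW_RLE_offset_pair = 0x04,
  EX_DW_RLE_base_address = 0x05,
  EX_DW_RLE_start_end = 0x06,
  EX_DW_RLE_start_length = 0x07
};

/* With LEB128 support and a usable base (the single text section, or a
   base established by DW_RLE_base_address), compact uleb128 offset
   pairs win; with LEB128 but no base, start+length is used; without
   LEB128, fall back to two full addresses.  */
static int
choose_rle_kind (int have_leb128, int have_base)
{
  if (!have_leb128)
    return EX_DW_RLE_start_end;
  return have_base ? EX_DW_RLE_offset_pair : EX_DW_RLE_start_length;
}
#endif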
11969
11970 /* Data structure containing information about input files. */
11971 struct file_info
11972 {
11973 const char *path; /* Complete file name. */
11974 const char *fname; /* File name part. */
11975 int length; /* Length of entire string. */
11976 struct dwarf_file_data * file_idx; /* Index in input file table. */
11977 int dir_idx; /* Index in directory table. */
11978 };
11979
11980 /* Data structure containing information about directories with source
11981 files. */
11982 struct dir_info
11983 {
11984 const char *path; /* Path including directory name. */
11985 int length; /* Path length. */
11986 int prefix; /* Index of directory entry which is a prefix. */
11987 int count; /* Number of files in this directory. */
11988 int dir_idx; /* Index of directory used as base. */
11989 };
11990
11991 /* Callback function for file_info comparison. We sort by looking at
11992 the directories in the path. */
11993
11994 static int
11995 file_info_cmp (const void *p1, const void *p2)
11996 {
11997 const struct file_info *const s1 = (const struct file_info *) p1;
11998 const struct file_info *const s2 = (const struct file_info *) p2;
11999 const unsigned char *cp1;
12000 const unsigned char *cp2;
12001
12002 /* Take care of file names without directories. We need to return
12003 consistent values to qsort since some implementations will get confused
12004 if we return the same value when identical operands are passed in
12005 opposite orders. So if neither has a directory, return 0; otherwise
12006 return 1 or -1 depending on which one has the directory. We want the
12007 one with the directory to sort after the one without, so all files
12008 without a directory are at the start (normally only the compilation unit file). */
12009 if ((s1->path == s1->fname || s2->path == s2->fname))
12010 return (s2->path == s2->fname) - (s1->path == s1->fname);
12011
12012 cp1 = (const unsigned char *) s1->path;
12013 cp2 = (const unsigned char *) s2->path;
12014
12015 while (1)
12016 {
12017 ++cp1;
12018 ++cp2;
12019 /* Reached the end of the first path? If so, handle like above,
12020 but now we want longer directory prefixes before shorter ones. */
12021 if ((cp1 == (const unsigned char *) s1->fname)
12022 || (cp2 == (const unsigned char *) s2->fname))
12023 return ((cp1 == (const unsigned char *) s1->fname)
12024 - (cp2 == (const unsigned char *) s2->fname));
12025
12026 /* Character of current path component the same? */
12027 else if (*cp1 != *cp2)
12028 return *cp1 - *cp2;
12029 }
12030 }
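
/* Illustrative sketch, not from the GCC sources: a simplified
   comparator over plain path strings showing the ordering
   file_info_cmp produces -- names with no directory first, then by
   directory, with a longer directory sorting before one that is its
   prefix (e.g. "t.c" < "a/b/x.c" < "a/y.c").  It only looks at '/',
   unlike the real code, and example_path_cmp is hypothetical.  */
#if 0
#include <string.h>

static int
example_path_cmp (const char *p1, const char *p2)
{
  const char *f1 = strrchr (p1, '/');
  const char *f2 = strrchr (p2, '/');

  if (f1 == NULL || f2 == NULL)
    return (f2 == NULL) - (f1 == NULL);

  for (;; p1++, p2++)
    {
      /* Reached a file name part?  The longer directory sorts first.  */
      if (p1 == f1 || p2 == f2)
        return (p1 == f1) - (p2 == f2);
      if (*p1 != *p2)
        return *p1 - *p2;
    }
}
#endif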
12031
12032 struct file_name_acquire_data
12033 {
12034 struct file_info *files;
12035 int used_files;
12036 int max_files;
12037 };
12038
12039 /* Traversal function for the hash table. */
12040
12041 int
12042 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12043 {
12044 struct dwarf_file_data *d = *slot;
12045 struct file_info *fi;
12046 const char *f;
12047
12048 gcc_assert (fnad->max_files >= d->emitted_number);
12049
12050 if (! d->emitted_number)
12051 return 1;
12052
12053 gcc_assert (fnad->max_files != fnad->used_files);
12054
12055 fi = fnad->files + fnad->used_files++;
12056
12057 /* Skip all leading "./". */
12058 f = d->filename;
12059 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12060 f += 2;
12061
12062 /* Create a new array entry. */
12063 fi->path = f;
12064 fi->length = strlen (f);
12065 fi->file_idx = d;
12066
12067 /* Search for the file name part. */
12068 f = strrchr (f, DIR_SEPARATOR);
12069 #if defined (DIR_SEPARATOR_2)
12070 {
12071 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12072
12073 if (g != NULL)
12074 {
12075 if (f == NULL || f < g)
12076 f = g;
12077 }
12078 }
12079 #endif
12080
12081 fi->fname = f == NULL ? fi->path : f + 1;
12082 return 1;
12083 }
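
/* Illustrative sketch, not from the GCC sources: a stand-alone version
   of the split above -- skip leading "./" components, then point at
   the part after the last separator.  It only handles '/', unlike the
   real code which also honors DIR_SEPARATOR_2, and
   example_file_name_part is hypothetical.  */
#if 0
#include <string.h>

static const char *
example_file_name_part (const char *path)
{
  const char *slash;

  while (path[0] == '.' && path[1] == '/')   /* Skip leading "./".  */
    path += 2;

  slash = strrchr (path, '/');
  return slash != NULL ? slash + 1 : path;
}
#endif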
12084
12085 /* Helper function for output_file_names. Emit a FORM-encoded
12086 string STR, with assembly comment start ENTRY_KIND and
12087 index IDX. */
12088
12089 static void
12090 output_line_string (enum dwarf_form form, const char *str,
12091 const char *entry_kind, unsigned int idx)
12092 {
12093 switch (form)
12094 {
12095 case DW_FORM_string:
12096 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12097 break;
12098 case DW_FORM_line_strp:
12099 if (!debug_line_str_hash)
12100 debug_line_str_hash
12101 = hash_table<indirect_string_hasher>::create_ggc (10);
12102
12103 struct indirect_string_node *node;
12104 node = find_AT_string_in_table (str, debug_line_str_hash);
12105 set_indirect_string (node);
12106 node->form = form;
12107 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12108 debug_line_str_section, "%s: %#x: \"%s\"",
12109 entry_kind, 0, node->str);
12110 break;
12111 default:
12112 gcc_unreachable ();
12113 }
12114 }
12115
12116 /* Output the directory table and the file name table. We try to minimize
12117 the total amount of memory needed. A heuristic is used to avoid large
12118 slowdowns with many input files. */
12119
12120 static void
12121 output_file_names (void)
12122 {
12123 struct file_name_acquire_data fnad;
12124 int numfiles;
12125 struct file_info *files;
12126 struct dir_info *dirs;
12127 int *saved;
12128 int *savehere;
12129 int *backmap;
12130 int ndirs;
12131 int idx_offset;
12132 int i;
12133
12134 if (!last_emitted_file)
12135 {
12136 if (dwarf_version >= 5)
12137 {
12138 dw2_asm_output_data (1, 0, "Directory entry format count");
12139 dw2_asm_output_data_uleb128 (0, "Directories count");
12140 dw2_asm_output_data (1, 0, "File name entry format count");
12141 dw2_asm_output_data_uleb128 (0, "File names count");
12142 }
12143 else
12144 {
12145 dw2_asm_output_data (1, 0, "End directory table");
12146 dw2_asm_output_data (1, 0, "End file name table");
12147 }
12148 return;
12149 }
12150
12151 numfiles = last_emitted_file->emitted_number;
12152
12153 /* Allocate the various arrays we need. */
12154 files = XALLOCAVEC (struct file_info, numfiles);
12155 dirs = XALLOCAVEC (struct dir_info, numfiles);
12156
12157 fnad.files = files;
12158 fnad.used_files = 0;
12159 fnad.max_files = numfiles;
12160 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12161 gcc_assert (fnad.used_files == fnad.max_files);
12162
12163 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12164
12165 /* Find all the different directories used. */
12166 dirs[0].path = files[0].path;
12167 dirs[0].length = files[0].fname - files[0].path;
12168 dirs[0].prefix = -1;
12169 dirs[0].count = 1;
12170 dirs[0].dir_idx = 0;
12171 files[0].dir_idx = 0;
12172 ndirs = 1;
12173
12174 for (i = 1; i < numfiles; i++)
12175 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12176 && memcmp (dirs[ndirs - 1].path, files[i].path,
12177 dirs[ndirs - 1].length) == 0)
12178 {
12179 /* Same directory as last entry. */
12180 files[i].dir_idx = ndirs - 1;
12181 ++dirs[ndirs - 1].count;
12182 }
12183 else
12184 {
12185 int j;
12186
12187 /* This is a new directory. */
12188 dirs[ndirs].path = files[i].path;
12189 dirs[ndirs].length = files[i].fname - files[i].path;
12190 dirs[ndirs].count = 1;
12191 dirs[ndirs].dir_idx = ndirs;
12192 files[i].dir_idx = ndirs;
12193
12194 /* Search for a prefix. */
12195 dirs[ndirs].prefix = -1;
12196 for (j = 0; j < ndirs; j++)
12197 if (dirs[j].length < dirs[ndirs].length
12198 && dirs[j].length > 1
12199 && (dirs[ndirs].prefix == -1
12200 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12201 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12202 dirs[ndirs].prefix = j;
12203
12204 ++ndirs;
12205 }
12206
12207 /* Now to the actual work. We have to find a subset of the directories that
12208 allows expressing the file names using references to the directory table
12209 with the fewest characters. We do not do an exhaustive search where we
12210 would have to check every combination of every single possible prefix.
12211 Instead we use a heuristic which provides nearly optimal results in most
12212 cases and is never far off. */
12213 saved = XALLOCAVEC (int, ndirs);
12214 savehere = XALLOCAVEC (int, ndirs);
12215
12216 memset (saved, '\0', ndirs * sizeof (saved[0]));
12217 for (i = 0; i < ndirs; i++)
12218 {
12219 int j;
12220 int total;
12221
12222 /* We can always save some space for the current directory. But this
12223 does not mean it will be enough to justify adding the directory. */
12224 savehere[i] = dirs[i].length;
12225 total = (savehere[i] - saved[i]) * dirs[i].count;
12226
12227 for (j = i + 1; j < ndirs; j++)
12228 {
12229 savehere[j] = 0;
12230 if (saved[j] < dirs[i].length)
12231 {
12232 /* Determine whether the dirs[i] path is a prefix of the
12233 dirs[j] path. */
12234 int k;
12235
12236 k = dirs[j].prefix;
12237 while (k != -1 && k != (int) i)
12238 k = dirs[k].prefix;
12239
12240 if (k == (int) i)
12241 {
12242 /* Yes it is. We can possibly save some memory by
12243 writing the filenames in dirs[j] relative to
12244 dirs[i]. */
12245 savehere[j] = dirs[i].length;
12246 total += (savehere[j] - saved[j]) * dirs[j].count;
12247 }
12248 }
12249 }
12250
12251 /* Check whether we can save enough to justify adding the dirs[i]
12252 directory. */
12253 if (total > dirs[i].length + 1)
12254 {
12255 /* It's worthwhile adding. */
12256 for (j = i; j < ndirs; j++)
12257 if (savehere[j] > 0)
12258 {
12259 /* Remember how much we saved for this directory so far. */
12260 saved[j] = savehere[j];
12261
12262 /* Remember the prefix directory. */
12263 dirs[j].dir_idx = i;
12264 }
12265 }
12266 }
12267
12268 /* Emit the directory name table. */
12269 idx_offset = dirs[0].length > 0 ? 1 : 0;
12270 enum dwarf_form str_form = DW_FORM_string;
12271 enum dwarf_form idx_form = DW_FORM_udata;
12272 if (dwarf_version >= 5)
12273 {
12274 const char *comp_dir = comp_dir_string ();
12275 if (comp_dir == NULL)
12276 comp_dir = "";
12277 dw2_asm_output_data (1, 1, "Directory entry format count");
12278 if (DWARF5_USE_DEBUG_LINE_STR)
12279 str_form = DW_FORM_line_strp;
12280 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12281 dw2_asm_output_data_uleb128 (str_form, "%s",
12282 get_DW_FORM_name (str_form));
12283 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12284 if (str_form == DW_FORM_string)
12285 {
12286 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12287 for (i = 1 - idx_offset; i < ndirs; i++)
12288 dw2_asm_output_nstring (dirs[i].path,
12289 dirs[i].length
12290 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12291 "Directory Entry: %#x", i + idx_offset);
12292 }
12293 else
12294 {
12295 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12296 for (i = 1 - idx_offset; i < ndirs; i++)
12297 {
12298 const char *str
12299 = ggc_alloc_string (dirs[i].path,
12300 dirs[i].length
12301 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12302 output_line_string (str_form, str, "Directory Entry",
12303 (unsigned) i + idx_offset);
12304 }
12305 }
12306 }
12307 else
12308 {
12309 for (i = 1 - idx_offset; i < ndirs; i++)
12310 dw2_asm_output_nstring (dirs[i].path,
12311 dirs[i].length
12312 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12313 "Directory Entry: %#x", i + idx_offset);
12314
12315 dw2_asm_output_data (1, 0, "End directory table");
12316 }
12317
12318 /* We have to emit them in the order of emitted_number since that's
12319 used in the debug info generation. To do this efficiently we
12320 generate a back-mapping of the indices first. */
12321 backmap = XALLOCAVEC (int, numfiles);
12322 for (i = 0; i < numfiles; i++)
12323 backmap[files[i].file_idx->emitted_number - 1] = i;
12324
12325 if (dwarf_version >= 5)
12326 {
12327 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12328 if (filename0 == NULL)
12329 filename0 = "";
12330 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12331 DW_FORM_data2. Choose one based on the number of directories
12332 and how much space they would occupy in each encoding.
12333 If we have at most 256 directories, all indexes fit into
12334 a single byte, so DW_FORM_data1 is most compact (if there
12335 are at most 128 directories, DW_FORM_udata would be just as
12336 compact, but no shorter and slower to decode). */
12337 if (ndirs + idx_offset <= 256)
12338 idx_form = DW_FORM_data1;
12339 /* If there are more than 65536 directories, we have to use
12340 DW_FORM_udata; DW_FORM_data2 can't refer to them.
12341 Otherwise, compute how much space would be occupied if all the
12342 indexes used DW_FORM_udata - sum - and compare that to how large
12343 the DW_FORM_data2 encoding would be, and pick the more efficient one. */
12344 else if (ndirs + idx_offset <= 65536)
12345 {
12346 unsigned HOST_WIDE_INT sum = 1;
12347 for (i = 0; i < numfiles; i++)
12348 {
12349 int file_idx = backmap[i];
12350 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12351 sum += size_of_uleb128 (dir_idx);
12352 }
12353 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12354 idx_form = DW_FORM_data2;
12355 }
12356 #ifdef VMS_DEBUGGING_INFO
12357 dw2_asm_output_data (1, 4, "File name entry format count");
12358 #else
12359 dw2_asm_output_data (1, 2, "File name entry format count");
12360 #endif
12361 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12362 dw2_asm_output_data_uleb128 (str_form, "%s",
12363 get_DW_FORM_name (str_form));
12364 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12365 "DW_LNCT_directory_index");
12366 dw2_asm_output_data_uleb128 (idx_form, "%s",
12367 get_DW_FORM_name (idx_form));
12368 #ifdef VMS_DEBUGGING_INFO
12369 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12370 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12371 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12372 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12373 #endif
12374 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12375
12376 output_line_string (str_form, filename0, "File Entry", 0);
12377
12378 /* Include directory index. */
12379 if (idx_form != DW_FORM_udata)
12380 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12381 0, NULL);
12382 else
12383 dw2_asm_output_data_uleb128 (0, NULL);
12384
12385 #ifdef VMS_DEBUGGING_INFO
12386 dw2_asm_output_data_uleb128 (0, NULL);
12387 dw2_asm_output_data_uleb128 (0, NULL);
12388 #endif
12389 }
12390
12391 /* Now write all the file names. */
12392 for (i = 0; i < numfiles; i++)
12393 {
12394 int file_idx = backmap[i];
12395 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12396
12397 #ifdef VMS_DEBUGGING_INFO
12398 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12399
12400 /* Setting these fields can lead to debugger miscomparisons,
12401 but VMS Debug requires them to be set correctly. */
12402
12403 int ver;
12404 long long cdt;
12405 long siz;
12406 int maxfilelen = (strlen (files[file_idx].path)
12407 + dirs[dir_idx].length
12408 + MAX_VMS_VERSION_LEN + 1);
12409 char *filebuf = XALLOCAVEC (char, maxfilelen);
12410
12411 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12412 snprintf (filebuf, maxfilelen, "%s;%d",
12413 files[file_idx].path + dirs[dir_idx].length, ver);
12414
12415 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12416
12417 /* Include directory index. */
12418 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12419 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12420 dir_idx + idx_offset, NULL);
12421 else
12422 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12423
12424 /* Modification time. */
12425 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12426 &cdt, 0, 0, 0) == 0)
12427 ? cdt : 0, NULL);
12428
12429 /* File length in bytes. */
12430 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12431 0, &siz, 0, 0) == 0)
12432 ? siz : 0, NULL);
12433 #else
12434 output_line_string (str_form,
12435 files[file_idx].path + dirs[dir_idx].length,
12436 "File Entry", (unsigned) i + 1);
12437
12438 /* Include directory index. */
12439 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12440 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12441 dir_idx + idx_offset, NULL);
12442 else
12443 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12444
12445 if (dwarf_version >= 5)
12446 continue;
12447
12448 /* Modification time. */
12449 dw2_asm_output_data_uleb128 (0, NULL);
12450
12451 /* File length in bytes. */
12452 dw2_asm_output_data_uleb128 (0, NULL);
12453 #endif /* VMS_DEBUGGING_INFO */
12454 }
12455
12456 if (dwarf_version < 5)
12457 dw2_asm_output_data (1, 0, "End file name table");
12458 }
12459
12460
12461 /* Output one line number table into the .debug_line section. */
12462
12463 static void
12464 output_one_line_info_table (dw_line_info_table *table)
12465 {
12466 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12467 unsigned int current_line = 1;
12468 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12469 dw_line_info_entry *ent, *prev_addr;
12470 size_t i;
12471 unsigned int view;
12472
12473 view = 0;
12474
12475 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12476 {
12477 switch (ent->opcode)
12478 {
12479 case LI_set_address:
12480 /* ??? Unfortunately, we have little choice here currently, and
12481 must always use the most general form. GCC does not know the
12482 address delta itself, so we can't use DW_LNS_advance_pc. Many
12483 ports do have length attributes which will give an upper bound
12484 on the address range. We could perhaps use length attributes
12485 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
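 /* A DWARF line-number extended opcode is encoded as a zero byte, a
    ULEB128 length, the sub-opcode and its operands; the dw2_asm_output_*
    calls below therefore emit 0x00, uleb128 (1 + DWARF2_ADDR_SIZE),
    DW_LNE_set_address and the relocated address itself.  */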
12486 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12487
12488 view = 0;
12489
12490 /* This can handle any delta. This takes
12491 4+DWARF2_ADDR_SIZE bytes. */
12492 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12493 debug_variable_location_views
12494 ? ", reset view to 0" : "");
12495 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12496 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12497 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12498
12499 prev_addr = ent;
12500 break;
12501
12502 case LI_adv_address:
12503 {
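 /* DW_LNS_fixed_advance_pc is the one standard opcode whose operand is a
    fixed-size 2-byte halfword rather than a ULEB128, and it is not scaled
    by the minimum instruction length, so it can be emitted as a simple
    label difference for the assembler to resolve.  */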
12504 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12505 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12506 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12507
12508 view++;
12509
12510 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12511 dw2_asm_output_delta (2, line_label, prev_label,
12512 "from %s to %s", prev_label, line_label);
12513
12514 prev_addr = ent;
12515 break;
12516 }
12517
12518 case LI_set_line:
12519 if (ent->val == current_line)
12520 {
12521 /* We still need to start a new row, so output a copy insn. */
12522 dw2_asm_output_data (1, DW_LNS_copy,
12523 "copy line %u", current_line);
12524 }
12525 else
12526 {
12527 int line_offset = ent->val - current_line;
12528 int line_delta = line_offset - DWARF_LINE_BASE;
12529
12530 current_line = ent->val;
12531 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12532 {
12533 /* This can handle deltas from -10 to 234, using the current
12534 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12535 This takes 1 byte. */
12536 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12537 "line %u", current_line);
12538 }
12539 else
12540 {
12541 /* This can handle any delta. This takes at least 4 bytes,
12542 depending on the value being encoded. */
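 /* For example, a forward jump of 300 lines emits DW_LNS_advance_line,
    a 2-byte SLEB128 for 300 and a DW_LNS_copy byte: 4 bytes in all.  */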
12543 dw2_asm_output_data (1, DW_LNS_advance_line,
12544 "advance to line %u", current_line);
12545 dw2_asm_output_data_sleb128 (line_offset, NULL);
12546 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12547 }
12548 }
12549 break;
12550
12551 case LI_set_file:
12552 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12553 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12554 break;
12555
12556 case LI_set_column:
12557 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12558 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12559 break;
12560
12561 case LI_negate_stmt:
12562 current_is_stmt = !current_is_stmt;
12563 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12564 "is_stmt %d", current_is_stmt);
12565 break;
12566
12567 case LI_set_prologue_end:
12568 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12569 "set prologue end");
12570 break;
12571
12572 case LI_set_epilogue_begin:
12573 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12574 "set epilogue begin");
12575 break;
12576
12577 case LI_set_discriminator:
12578 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12579 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12580 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12581 dw2_asm_output_data_uleb128 (ent->val, NULL);
12582 break;
12583 }
12584 }
12585
12586 /* Emit debug info for the address of the end of the table. */
12587 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12588 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12589 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12590 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12591
12592 dw2_asm_output_data (1, 0, "end sequence");
12593 dw2_asm_output_data_uleb128 (1, NULL);
12594 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12595 }
12596
12597 /* Output the source line number correspondence information. This
12598 information goes into the .debug_line section. */
12599
12600 static void
12601 output_line_info (bool prologue_only)
12602 {
12603 static unsigned int generation;
12604 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12605 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12606 bool saw_one = false;
12607 int opc;
12608
12609 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12610 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12611 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12612 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12613
12614 if (!XCOFF_DEBUGGING_INFO)
12615 {
12616 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12617 dw2_asm_output_data (4, 0xffffffff,
12618 "Initial length escape value indicating 64-bit DWARF extension");
12619 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12620 "Length of Source Line Info");
12621 }
12622
12623 ASM_OUTPUT_LABEL (asm_out_file, l1);
12624
12625 output_dwarf_version ();
12626 if (dwarf_version >= 5)
12627 {
12628 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12629 dw2_asm_output_data (1, 0, "Segment Size");
12630 }
12631 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12632 ASM_OUTPUT_LABEL (asm_out_file, p1);
12633
12634 /* Define the architecture-dependent minimum instruction length (in bytes).
12635 In this implementation of DWARF, this field is used for information
12636 purposes only. Since GCC generates assembly language, we have no
12637 a priori knowledge of how many instruction bytes are generated for each
12638 source line, and therefore can use only the DW_LNE_set_address and
12639 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12640 this as '1', which is "correct enough" for all architectures,
12641 and don't let the target override. */
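 /* Special opcodes and DW_LNS_advance_pc scale their address advance by
    this field; since we only emit DW_LNE_set_address and
    DW_LNS_fixed_advance_pc, neither of which is scaled, any value would
    do and 1 is the safest choice.  */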
12642 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12643
12644 if (dwarf_version >= 4)
12645 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12646 "Maximum Operations Per Instruction");
12647 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12648 "Default is_stmt_start flag");
12649 dw2_asm_output_data (1, DWARF_LINE_BASE,
12650 "Line Base Value (Special Opcodes)");
12651 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12652 "Line Range Value (Special Opcodes)");
12653 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12654 "Special Opcode Base");
12655
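 /* This loop emits the standard_opcode_lengths table: one entry per
    standard opcode giving its number of ULEB128 operands, which lets
    consumers skip standard opcodes they do not recognize.  */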
12656 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12657 {
12658 int n_op_args;
12659 switch (opc)
12660 {
12661 case DW_LNS_advance_pc:
12662 case DW_LNS_advance_line:
12663 case DW_LNS_set_file:
12664 case DW_LNS_set_column:
12665 case DW_LNS_fixed_advance_pc:
12666 case DW_LNS_set_isa:
12667 n_op_args = 1;
12668 break;
12669 default:
12670 n_op_args = 0;
12671 break;
12672 }
12673
12674 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12675 opc, n_op_args);
12676 }
12677
12678 /* Write out the information about the files we use. */
12679 output_file_names ();
12680 ASM_OUTPUT_LABEL (asm_out_file, p2);
12681 if (prologue_only)
12682 {
12683 /* Output the marker for the end of the line number info. */
12684 ASM_OUTPUT_LABEL (asm_out_file, l2);
12685 return;
12686 }
12687
12688 if (separate_line_info)
12689 {
12690 dw_line_info_table *table;
12691 size_t i;
12692
12693 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12694 if (table->in_use)
12695 {
12696 output_one_line_info_table (table);
12697 saw_one = true;
12698 }
12699 }
12700 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12701 {
12702 output_one_line_info_table (cold_text_section_line_info);
12703 saw_one = true;
12704 }
12705
12706 /* ??? Some Darwin linkers crash on a .debug_line section with no
12707 sequences. Further, merely a DW_LNE_end_sequence entry is not
12708 sufficient -- the address column must also be initialized.
12709 Make sure to output at least one set_address/end_sequence pair,
12710 choosing .text since that section is always present. */
12711 if (text_section_line_info->in_use || !saw_one)
12712 output_one_line_info_table (text_section_line_info);
12713
12714 /* Output the marker for the end of the line number info. */
12715 ASM_OUTPUT_LABEL (asm_out_file, l2);
12716 }
12717 \f
12718 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12719
12720 static inline bool
12721 need_endianity_attribute_p (bool reverse)
12722 {
12723 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12724 }
12725
12726 /* Given a pointer to a tree node for some base type, return a pointer to
12727 a DIE that describes the given type. REVERSE is true if the type is
12728 to be interpreted in the reverse storage order wrt the target order.
12729
12730 This routine must only be called for GCC type nodes that correspond to
12731 Dwarf base (fundamental) types. */
12732
12733 static dw_die_ref
12734 base_type_die (tree type, bool reverse)
12735 {
12736 dw_die_ref base_type_result;
12737 enum dwarf_type encoding;
12738 bool fpt_used = false;
12739 struct fixed_point_type_info fpt_info;
12740 tree type_bias = NULL_TREE;
12741
12742 /* If this is a subtype that should not be emitted as a subrange type,
12743 use the base type. See subrange_type_for_debug_p. */
12744 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12745 type = TREE_TYPE (type);
12746
12747 switch (TREE_CODE (type))
12748 {
12749 case INTEGER_TYPE:
12750 if ((dwarf_version >= 4 || !dwarf_strict)
12751 && TYPE_NAME (type)
12752 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12753 && DECL_IS_BUILTIN (TYPE_NAME (type))
12754 && DECL_NAME (TYPE_NAME (type)))
12755 {
12756 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12757 if (strcmp (name, "char16_t") == 0
12758 || strcmp (name, "char32_t") == 0)
12759 {
12760 encoding = DW_ATE_UTF;
12761 break;
12762 }
12763 }
12764 if ((dwarf_version >= 3 || !dwarf_strict)
12765 && lang_hooks.types.get_fixed_point_type_info)
12766 {
12767 memset (&fpt_info, 0, sizeof (fpt_info));
12768 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12769 {
12770 fpt_used = true;
12771 encoding = ((TYPE_UNSIGNED (type))
12772 ? DW_ATE_unsigned_fixed
12773 : DW_ATE_signed_fixed);
12774 break;
12775 }
12776 }
12777 if (TYPE_STRING_FLAG (type))
12778 {
12779 if (TYPE_UNSIGNED (type))
12780 encoding = DW_ATE_unsigned_char;
12781 else
12782 encoding = DW_ATE_signed_char;
12783 }
12784 else if (TYPE_UNSIGNED (type))
12785 encoding = DW_ATE_unsigned;
12786 else
12787 encoding = DW_ATE_signed;
12788
12789 if (!dwarf_strict
12790 && lang_hooks.types.get_type_bias)
12791 type_bias = lang_hooks.types.get_type_bias (type);
12792 break;
12793
12794 case REAL_TYPE:
12795 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12796 {
12797 if (dwarf_version >= 3 || !dwarf_strict)
12798 encoding = DW_ATE_decimal_float;
12799 else
12800 encoding = DW_ATE_lo_user;
12801 }
12802 else
12803 encoding = DW_ATE_float;
12804 break;
12805
12806 case FIXED_POINT_TYPE:
12807 if (!(dwarf_version >= 3 || !dwarf_strict))
12808 encoding = DW_ATE_lo_user;
12809 else if (TYPE_UNSIGNED (type))
12810 encoding = DW_ATE_unsigned_fixed;
12811 else
12812 encoding = DW_ATE_signed_fixed;
12813 break;
12814
12815 /* Dwarf2 doesn't know anything about complex ints, so use
12816 a user-defined type for them. */
12817 case COMPLEX_TYPE:
12818 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12819 encoding = DW_ATE_complex_float;
12820 else
12821 encoding = DW_ATE_lo_user;
12822 break;
12823
12824 case BOOLEAN_TYPE:
12825 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12826 encoding = DW_ATE_boolean;
12827 break;
12828
12829 default:
12830 /* No other TREE_CODEs are Dwarf fundamental types. */
12831 gcc_unreachable ();
12832 }
12833
12834 base_type_result = new_die_raw (DW_TAG_base_type);
12835
12836 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12837 int_size_in_bytes (type));
12838 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12839
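 /* REVERSE means the scalar is to be read in the byte order opposite to
    the target's, hence the inverted DW_END_* value below.  */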
12840 if (need_endianity_attribute_p (reverse))
12841 add_AT_unsigned (base_type_result, DW_AT_endianity,
12842 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12843
12844 add_alignment_attribute (base_type_result, type);
12845
12846 if (fpt_used)
12847 {
12848 switch (fpt_info.scale_factor_kind)
12849 {
12850 case fixed_point_scale_factor_binary:
12851 add_AT_int (base_type_result, DW_AT_binary_scale,
12852 fpt_info.scale_factor.binary);
12853 break;
12854
12855 case fixed_point_scale_factor_decimal:
12856 add_AT_int (base_type_result, DW_AT_decimal_scale,
12857 fpt_info.scale_factor.decimal);
12858 break;
12859
12860 case fixed_point_scale_factor_arbitrary:
12861 /* Arbitrary scale factors cannot be described in standard DWARF,
12862 yet. */
12863 if (!dwarf_strict)
12864 {
12865 /* Describe the scale factor as a rational constant. */
12866 const dw_die_ref scale_factor
12867 = new_die (DW_TAG_constant, comp_unit_die (), type);
12868
12869 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12870 fpt_info.scale_factor.arbitrary.numerator);
12871 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12872 fpt_info.scale_factor.arbitrary.denominator);
12873
12874 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12875 }
12876 break;
12877
12878 default:
12879 gcc_unreachable ();
12880 }
12881 }
12882
12883 if (type_bias)
12884 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12885 dw_scalar_form_constant
12886 | dw_scalar_form_exprloc
12887 | dw_scalar_form_reference,
12888 NULL);
12889
12890 return base_type_result;
12891 }
12892
12893 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12894 named 'auto' in its type: return true for it, false otherwise. */
12895
12896 static inline bool
12897 is_cxx_auto (tree type)
12898 {
12899 if (is_cxx ())
12900 {
12901 tree name = TYPE_IDENTIFIER (type);
12902 if (name == get_identifier ("auto")
12903 || name == get_identifier ("decltype(auto)"))
12904 return true;
12905 }
12906 return false;
12907 }
12908
12909 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12910 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12911
12912 static inline int
12913 is_base_type (tree type)
12914 {
12915 switch (TREE_CODE (type))
12916 {
12917 case INTEGER_TYPE:
12918 case REAL_TYPE:
12919 case FIXED_POINT_TYPE:
12920 case COMPLEX_TYPE:
12921 case BOOLEAN_TYPE:
12922 return 1;
12923
12924 case VOID_TYPE:
12925 case ARRAY_TYPE:
12926 case RECORD_TYPE:
12927 case UNION_TYPE:
12928 case QUAL_UNION_TYPE:
12929 case ENUMERAL_TYPE:
12930 case FUNCTION_TYPE:
12931 case METHOD_TYPE:
12932 case POINTER_TYPE:
12933 case REFERENCE_TYPE:
12934 case NULLPTR_TYPE:
12935 case OFFSET_TYPE:
12936 case LANG_TYPE:
12937 case VECTOR_TYPE:
12938 return 0;
12939
12940 default:
12941 if (is_cxx_auto (type))
12942 return 0;
12943 gcc_unreachable ();
12944 }
12945
12946 return 0;
12947 }
12948
12949 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12950 node, return the size in bits for the type if it is a constant, or else
12951 return the alignment for the type if the type's size is not constant, or
12952 else return BITS_PER_WORD if the type actually turns out to be an
12953 ERROR_MARK node. */
12954
12955 static inline unsigned HOST_WIDE_INT
12956 simple_type_size_in_bits (const_tree type)
12957 {
12958 if (TREE_CODE (type) == ERROR_MARK)
12959 return BITS_PER_WORD;
12960 else if (TYPE_SIZE (type) == NULL_TREE)
12961 return 0;
12962 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12963 return tree_to_uhwi (TYPE_SIZE (type));
12964 else
12965 return TYPE_ALIGN (type);
12966 }
12967
12968 /* Similarly, but return an offset_int instead of UHWI. */
12969
12970 static inline offset_int
12971 offset_int_type_size_in_bits (const_tree type)
12972 {
12973 if (TREE_CODE (type) == ERROR_MARK)
12974 return BITS_PER_WORD;
12975 else if (TYPE_SIZE (type) == NULL_TREE)
12976 return 0;
12977 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12978 return wi::to_offset (TYPE_SIZE (type));
12979 else
12980 return TYPE_ALIGN (type);
12981 }
12982
12983 /* Given a pointer to a tree node for a subrange type, return a pointer
12984 to a DIE that describes the given type. */
12985
12986 static dw_die_ref
12987 subrange_type_die (tree type, tree low, tree high, tree bias,
12988 dw_die_ref context_die)
12989 {
12990 dw_die_ref subrange_die;
12991 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12992
12993 if (context_die == NULL)
12994 context_die = comp_unit_die ();
12995
12996 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12997
12998 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12999 {
13000 /* The size of the subrange type and its base type do not match,
13001 so we need to generate a size attribute for the subrange type. */
13002 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13003 }
13004
13005 add_alignment_attribute (subrange_die, type);
13006
13007 if (low)
13008 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13009 if (high)
13010 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13011 if (bias && !dwarf_strict)
13012 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13013 dw_scalar_form_constant
13014 | dw_scalar_form_exprloc
13015 | dw_scalar_form_reference,
13016 NULL);
13017
13018 return subrange_die;
13019 }
13020
13021 /* Returns the (const and/or volatile) cv_qualifiers associated with
13022 the decl node. This will normally be augmented with the
13023 cv_qualifiers of the underlying type in add_type_attribute. */
13024
13025 static int
13026 decl_quals (const_tree decl)
13027 {
13028 return ((TREE_READONLY (decl)
13029 /* The C++ front-end correctly marks reference-typed
13030 variables as readonly, but from a language (and debug
13031 info) standpoint they are not const-qualified. */
13032 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13033 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13034 | (TREE_THIS_VOLATILE (decl)
13035 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13036 }
13037
13038 /* Determine the TYPE whose qualifiers match the largest strict subset
13039 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13040 qualifiers outside QUAL_MASK. */
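 /* For instance, for a "const volatile T" request this returns the
    qualifiers of an existing "const T" or "volatile T" variant, if any,
    so that only the DIEs for the missing qualifiers need to be
    created.  */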
13041
13042 static int
13043 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13044 {
13045 tree t;
13046 int best_rank = 0, best_qual = 0, max_rank;
13047
13048 type_quals &= qual_mask;
13049 max_rank = popcount_hwi (type_quals) - 1;
13050
13051 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13052 t = TYPE_NEXT_VARIANT (t))
13053 {
13054 int q = TYPE_QUALS (t) & qual_mask;
13055
13056 if ((q & type_quals) == q && q != type_quals
13057 && check_base_type (t, type))
13058 {
13059 int rank = popcount_hwi (q);
13060
13061 if (rank > best_rank)
13062 {
13063 best_rank = rank;
13064 best_qual = q;
13065 }
13066 }
13067 }
13068
13069 return best_qual;
13070 }
13071
13072 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13073 static const dwarf_qual_info_t dwarf_qual_info[] =
13074 {
13075 { TYPE_QUAL_CONST, DW_TAG_const_type },
13076 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13077 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13078 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13079 };
13080 static const unsigned int dwarf_qual_info_size
13081 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13082
13083 /* If DIE is a qualified DIE of some base DIE with the same parent,
13084 return the base DIE, otherwise return NULL. Set MASK to the
13085 qualifiers added compared to the returned DIE. */
13086
13087 static dw_die_ref
13088 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13089 {
13090 unsigned int i;
13091 for (i = 0; i < dwarf_qual_info_size; i++)
13092 if (die->die_tag == dwarf_qual_info[i].t)
13093 break;
13094 if (i == dwarf_qual_info_size)
13095 return NULL;
13096 if (vec_safe_length (die->die_attr) != 1)
13097 return NULL;
13098 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13099 if (type == NULL || type->die_parent != die->die_parent)
13100 return NULL;
13101 *mask |= dwarf_qual_info[i].q;
13102 if (depth)
13103 {
13104 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13105 if (ret)
13106 return ret;
13107 }
13108 return type;
13109 }
13110
13111 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13112 entry that chains the modifiers specified by CV_QUALS in front of the
13113 given type. REVERSE is true if the type is to be interpreted in the
13114 reverse storage order wrt the target order. */
13115
13116 static dw_die_ref
13117 modified_type_die (tree type, int cv_quals, bool reverse,
13118 dw_die_ref context_die)
13119 {
13120 enum tree_code code = TREE_CODE (type);
13121 dw_die_ref mod_type_die;
13122 dw_die_ref sub_die = NULL;
13123 tree item_type = NULL;
13124 tree qualified_type;
13125 tree name, low, high;
13126 dw_die_ref mod_scope;
13127 /* Only these cv-qualifiers are currently handled. */
13128 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13129 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC
13130 | ENCODE_QUAL_ADDR_SPACE (~0U));
13131 const bool reverse_base_type
13132 = need_endianity_attribute_p (reverse) && is_base_type (type);
13133
13134 if (code == ERROR_MARK)
13135 return NULL;
13136
13137 if (lang_hooks.types.get_debug_type)
13138 {
13139 tree debug_type = lang_hooks.types.get_debug_type (type);
13140
13141 if (debug_type != NULL_TREE && debug_type != type)
13142 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13143 }
13144
13145 cv_quals &= cv_qual_mask;
13146
13147 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13148 tag modifier (and not an attribute) that old consumers won't be able
13149 to handle it. */
13150 if (dwarf_version < 3)
13151 cv_quals &= ~TYPE_QUAL_RESTRICT;
13152
13153 /* Likewise for DW_TAG_atomic_type, which is new in DWARFv5. */
13154 if (dwarf_version < 5)
13155 cv_quals &= ~TYPE_QUAL_ATOMIC;
13156
13157 /* See if we already have the appropriately qualified variant of
13158 this type. */
13159 qualified_type = get_qualified_type (type, cv_quals);
13160
13161 if (qualified_type == sizetype)
13162 {
13163 /* Try not to expose the internal sizetype type's name. */
13164 if (TYPE_NAME (qualified_type)
13165 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13166 {
13167 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13168
13169 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13170 && (TYPE_PRECISION (t)
13171 == TYPE_PRECISION (qualified_type))
13172 && (TYPE_UNSIGNED (t)
13173 == TYPE_UNSIGNED (qualified_type)));
13174 qualified_type = t;
13175 }
13176 else if (qualified_type == sizetype
13177 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13178 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13179 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13180 qualified_type = size_type_node;
13181 }
13182
13183 /* If we do, then we can just use its DIE, if it exists. */
13184 if (qualified_type)
13185 {
13186 mod_type_die = lookup_type_die (qualified_type);
13187
13188 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13189 dealt with specially: the DIE with the attribute, if it exists, is
13190 placed immediately after the regular DIE for the same base type. */
13191 if (mod_type_die
13192 && (!reverse_base_type
13193 || ((mod_type_die = mod_type_die->die_sib) != NULL
13194 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13195 return mod_type_die;
13196 }
13197
13198 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13199
13200 /* Handle C typedef types. */
13201 if (name
13202 && TREE_CODE (name) == TYPE_DECL
13203 && DECL_ORIGINAL_TYPE (name)
13204 && !DECL_ARTIFICIAL (name))
13205 {
13206 tree dtype = TREE_TYPE (name);
13207
13208 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13209 if (qualified_type == dtype && !reverse_base_type)
13210 {
13211 tree origin = decl_ultimate_origin (name);
13212
13213 /* Typedef variants that have an abstract origin don't get their own
13214 type DIE (see gen_typedef_die), so fall back on the ultimate
13215 abstract origin instead. */
13216 if (origin != NULL && origin != name)
13217 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13218 context_die);
13219
13220 /* For a named type, use the typedef. */
13221 gen_type_die (qualified_type, context_die);
13222 return lookup_type_die (qualified_type);
13223 }
13224 else
13225 {
13226 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13227 dquals &= cv_qual_mask;
13228 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13229 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13230 /* cv-unqualified version of named type. Just use
13231 the unnamed type to which it refers. */
13232 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13233 reverse, context_die);
13234 /* Else cv-qualified version of named type; fall through. */
13235 }
13236 }
13237
13238 mod_scope = scope_die_for (type, context_die);
13239
13240 if (cv_quals)
13241 {
13242 int sub_quals = 0, first_quals = 0;
13243 unsigned i;
13244 dw_die_ref first = NULL, last = NULL;
13245
13246 /* Determine a lesser qualified type that most closely matches
13247 this one. Then generate DW_TAG_* entries for the remaining
13248 qualifiers. */
13249 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13250 cv_qual_mask);
13251 if (sub_quals && use_debug_types)
13252 {
13253 bool needed = false;
13254 /* If emitting type units, make sure the order of qualifiers
13255 is canonical. Thus, start from unqualified type if
13256 an earlier qualifier is missing in sub_quals, but some later
13257 one is present there. */
13258 for (i = 0; i < dwarf_qual_info_size; i++)
13259 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13260 needed = true;
13261 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13262 {
13263 sub_quals = 0;
13264 break;
13265 }
13266 }
13267 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13268 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13269 {
13270 /* As not all intermediate qualified DIEs have corresponding
13271 tree types, ensure that qualified DIEs in the same scope
13272 as their DW_AT_type are emitted after their DW_AT_type,
13273 only with other qualified DIEs for the same type possibly
13274 in between them. Determine the range of such qualified
13275 DIEs now (first being the base type, last being corresponding
13276 last qualified DIE for it). */
13277 unsigned int count = 0;
13278 first = qualified_die_p (mod_type_die, &first_quals,
13279 dwarf_qual_info_size);
13280 if (first == NULL)
13281 first = mod_type_die;
13282 gcc_assert ((first_quals & ~sub_quals) == 0);
13283 for (count = 0, last = first;
13284 count < (1U << dwarf_qual_info_size);
13285 count++, last = last->die_sib)
13286 {
13287 int quals = 0;
13288 if (last == mod_scope->die_child)
13289 break;
13290 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13291 != first)
13292 break;
13293 }
13294 }
13295
13296 for (i = 0; i < dwarf_qual_info_size; i++)
13297 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13298 {
13299 dw_die_ref d;
13300 if (first && first != last)
13301 {
13302 for (d = first->die_sib; ; d = d->die_sib)
13303 {
13304 int quals = 0;
13305 qualified_die_p (d, &quals, dwarf_qual_info_size);
13306 if (quals == (first_quals | dwarf_qual_info[i].q))
13307 break;
13308 if (d == last)
13309 {
13310 d = NULL;
13311 break;
13312 }
13313 }
13314 if (d)
13315 {
13316 mod_type_die = d;
13317 continue;
13318 }
13319 }
13320 if (first)
13321 {
13322 d = new_die_raw (dwarf_qual_info[i].t);
13323 add_child_die_after (mod_scope, d, last);
13324 last = d;
13325 }
13326 else
13327 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13328 if (mod_type_die)
13329 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13330 mod_type_die = d;
13331 first_quals |= dwarf_qual_info[i].q;
13332 }
13333 }
13334 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13335 {
13336 dwarf_tag tag = DW_TAG_pointer_type;
13337 if (code == REFERENCE_TYPE)
13338 {
13339 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13340 tag = DW_TAG_rvalue_reference_type;
13341 else
13342 tag = DW_TAG_reference_type;
13343 }
13344 mod_type_die = new_die (tag, mod_scope, type);
13345
13346 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13347 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13348 add_alignment_attribute (mod_type_die, type);
13349 item_type = TREE_TYPE (type);
13350
13351 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13352 if (!ADDR_SPACE_GENERIC_P (as))
13353 {
13354 int action = targetm.addr_space.debug (as);
13355 if (action >= 0)
13356 {
13357 /* Positive values indicate an address_class. */
13358 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13359 }
13360 else
13361 {
13362 /* Negative values indicate an (inverted) segment base reg. */
13363 dw_loc_descr_ref d
13364 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13365 add_AT_loc (mod_type_die, DW_AT_segment, d);
13366 }
13367 }
13368 }
13369 else if (code == INTEGER_TYPE
13370 && TREE_TYPE (type) != NULL_TREE
13371 && subrange_type_for_debug_p (type, &low, &high))
13372 {
13373 tree bias = NULL_TREE;
13374 if (lang_hooks.types.get_type_bias)
13375 bias = lang_hooks.types.get_type_bias (type);
13376 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13377 item_type = TREE_TYPE (type);
13378 }
13379 else if (is_base_type (type))
13380 {
13381 mod_type_die = base_type_die (type, reverse);
13382
13383 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13384 if (reverse_base_type)
13385 {
13386 dw_die_ref after_die
13387 = modified_type_die (type, cv_quals, false, context_die);
13388 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13389 }
13390 else
13391 add_child_die (comp_unit_die (), mod_type_die);
13392
13393 add_pubtype (type, mod_type_die);
13394 }
13395 else
13396 {
13397 gen_type_die (type, context_die);
13398
13399 /* We have to get the type_main_variant here (and pass that to the
13400 `lookup_type_die' routine) because the ..._TYPE node we have
13401 might simply be a *copy* of some original type node (where the
13402 copy was created to help us keep track of typedef names) and
13403 that copy might have a different TYPE_UID from the original
13404 ..._TYPE node. */
13405 if (TREE_CODE (type) == FUNCTION_TYPE
13406 || TREE_CODE (type) == METHOD_TYPE)
13407 {
13408 /* For function/method types, can't just use type_main_variant here,
13409 because that can have different ref-qualifiers for C++,
13410 but try to canonicalize. */
13411 tree main = TYPE_MAIN_VARIANT (type);
13412 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13413 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13414 && check_base_type (t, main)
13415 && check_lang_type (t, type))
13416 return lookup_type_die (t);
13417 return lookup_type_die (type);
13418 }
13419 else if (TREE_CODE (type) != VECTOR_TYPE
13420 && TREE_CODE (type) != ARRAY_TYPE)
13421 return lookup_type_die (type_main_variant (type));
13422 else
13423 /* Vectors have the debugging information in the type,
13424 not the main variant. */
13425 return lookup_type_die (type);
13426 }
13427
13428 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13429 don't output a DW_TAG_typedef, since there isn't one in the
13430 user's program; just attach a DW_AT_name to the type.
13431 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13432 if the base type already has the same name. */
13433 if (name
13434 && ((TREE_CODE (name) != TYPE_DECL
13435 && (qualified_type == TYPE_MAIN_VARIANT (type)
13436 || (cv_quals == TYPE_UNQUALIFIED)))
13437 || (TREE_CODE (name) == TYPE_DECL
13438 && TREE_TYPE (name) == qualified_type
13439 && DECL_NAME (name))))
13440 {
13441 if (TREE_CODE (name) == TYPE_DECL)
13442 /* Could just call add_name_and_src_coords_attributes here,
13443 but since this is a builtin type it doesn't have any
13444 useful source coordinates anyway. */
13445 name = DECL_NAME (name);
13446 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13447 }
13448 /* This probably indicates a bug. */
13449 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13450 {
13451 name = TYPE_IDENTIFIER (type);
13452 add_name_attribute (mod_type_die,
13453 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13454 }
13455
13456 if (qualified_type && !reverse_base_type)
13457 equate_type_number_to_die (qualified_type, mod_type_die);
13458
13459 if (item_type)
13460 /* We must do this after the equate_type_number_to_die call, in case
13461 this is a recursive type. This ensures that the modified_type_die
13462 recursion will terminate even if the type is recursive. Recursive
13463 types are possible in Ada. */
13464 sub_die = modified_type_die (item_type,
13465 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13466 reverse,
13467 context_die);
13468
13469 if (sub_die != NULL)
13470 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13471
13472 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13473 if (TYPE_ARTIFICIAL (type))
13474 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13475
13476 return mod_type_die;
13477 }
13478
13479 /* Generate DIEs for the generic parameters of T.
13480 T must be either a generic type or a generic function.
13481 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13482
13483 static void
13484 gen_generic_params_dies (tree t)
13485 {
13486 tree parms, args;
13487 int parms_num, i;
13488 dw_die_ref die = NULL;
13489 int non_default;
13490
13491 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13492 return;
13493
13494 if (TYPE_P (t))
13495 die = lookup_type_die (t);
13496 else if (DECL_P (t))
13497 die = lookup_decl_die (t);
13498
13499 gcc_assert (die);
13500
13501 parms = lang_hooks.get_innermost_generic_parms (t);
13502 if (!parms)
13503 /* T has no generic parameter. It means T is neither a generic type
13504 nor a generic function. End of story. */
13505 return;
13506
13507 parms_num = TREE_VEC_LENGTH (parms);
13508 args = lang_hooks.get_innermost_generic_args (t);
13509 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13510 non_default = int_cst_value (TREE_CHAIN (args));
13511 else
13512 non_default = TREE_VEC_LENGTH (args);
13513 for (i = 0; i < parms_num; i++)
13514 {
13515 tree parm, arg, arg_pack_elems;
13516 dw_die_ref parm_die;
13517
13518 parm = TREE_VEC_ELT (parms, i);
13519 arg = TREE_VEC_ELT (args, i);
13520 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13521 gcc_assert (parm && TREE_VALUE (parm) && arg);
13522
13523 if (parm && TREE_VALUE (parm) && arg)
13524 {
13525 /* If PARM represents a template parameter pack,
13526 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13527 by DW_TAG_template_*_parameter DIEs for the argument
13528 pack elements of ARG. Note that ARG would then be
13529 an argument pack. */
13530 if (arg_pack_elems)
13531 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13532 arg_pack_elems,
13533 die);
13534 else
13535 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13536 true /* emit name */, die);
13537 if (i >= non_default)
13538 add_AT_flag (parm_die, DW_AT_default_value, 1);
13539 }
13540 }
13541 }
13542
13543 /* Create and return a DIE for PARM which should be
13544 the representation of a generic type parameter.
13545 For instance, in the C++ front end, PARM would be a template parameter.
13546 ARG is the argument to PARM.
13547 EMIT_NAME_P: if true, the DIE will have a DW_AT_name attribute set to
13548 the name of PARM.
13549 PARENT_DIE is the parent DIE which the new created DIE should be added to,
13550 as a child node. */
13551
13552 static dw_die_ref
13553 generic_parameter_die (tree parm, tree arg,
13554 bool emit_name_p,
13555 dw_die_ref parent_die)
13556 {
13557 dw_die_ref tmpl_die = NULL;
13558 const char *name = NULL;
13559
13560 if (!parm || !DECL_NAME (parm) || !arg)
13561 return NULL;
13562
13563 /* We support non-type generic parameters and arguments,
13564 type generic parameters and arguments, as well as
13565 generic generic parameters (a.k.a. template template parameters in C++)
13566 and arguments. */
13567 if (TREE_CODE (parm) == PARM_DECL)
13568 /* PARM is a nontype generic parameter */
13569 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13570 else if (TREE_CODE (parm) == TYPE_DECL)
13571 /* PARM is a type generic parameter. */
13572 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13573 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13574 /* PARM is a generic generic parameter.
13575 Its DIE is a GNU extension. It shall have a
13576 DW_AT_name attribute to represent the name of the template template
13577 parameter, and a DW_AT_GNU_template_name attribute to represent the
13578 name of the template template argument. */
13579 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13580 parent_die, parm);
13581 else
13582 gcc_unreachable ();
13583
13584 if (tmpl_die)
13585 {
13586 tree tmpl_type;
13587
13588 /* If PARM is a generic parameter pack, it means we are
13589 emitting debug info for a template argument pack element.
13590 In other words, ARG is a template argument pack element.
13591 In that case, we don't emit any DW_AT_name attribute for
13592 the DIE. */
13593 if (emit_name_p)
13594 {
13595 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13596 gcc_assert (name);
13597 add_AT_string (tmpl_die, DW_AT_name, name);
13598 }
13599
13600 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13601 {
13602 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13603 TMPL_DIE should have a child DW_AT_type attribute that is set
13604 to the type of the argument to PARM, which is ARG.
13605 If PARM is a type generic parameter, TMPL_DIE should have a
13606 child DW_AT_type that is set to ARG. */
13607 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13608 add_type_attribute (tmpl_die, tmpl_type,
13609 (TREE_THIS_VOLATILE (tmpl_type)
13610 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13611 false, parent_die);
13612 }
13613 else
13614 {
13615 /* So TMPL_DIE is a DIE representing a generic generic template
13616 parameter, a.k.a. a template template parameter in C++,
13617 and ARG is a template. */
13618
13619 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13620 to the name of the argument. */
13621 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13622 if (name)
13623 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13624 }
13625
13626 if (TREE_CODE (parm) == PARM_DECL)
13627 /* So PARM is a non-type generic parameter.
13628 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13629 attribute of TMPL_DIE whose value represents the value
13630 of ARG.
13631 We must be careful here:
13632 the value of ARG might reference some function decls.
13633 We might currently be emitting debug info for a generic
13634 type, and types are emitted before function decls, so we don't
13635 know whether the function decls referenced by ARG will actually be
13636 emitted after the cgraph computations.
13637 So we must defer the generation of the DW_AT_const_value until
13638 after cgraph is ready. */
13639 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13640 }
13641
13642 return tmpl_die;
13643 }
13644
13645 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13646 PARM_PACK, which must be a template parameter pack. The returned DIE
13647 will be a child of PARENT_DIE. */
13648
13649 static dw_die_ref
13650 template_parameter_pack_die (tree parm_pack,
13651 tree parm_pack_args,
13652 dw_die_ref parent_die)
13653 {
13654 dw_die_ref die;
13655 int j;
13656
13657 gcc_assert (parent_die && parm_pack);
13658
13659 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13660 add_name_and_src_coords_attributes (die, parm_pack);
13661 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13662 generic_parameter_die (parm_pack,
13663 TREE_VEC_ELT (parm_pack_args, j),
13664 false /* Don't emit DW_AT_name */,
13665 die);
13666 return die;
13667 }
13668
13669 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13670 an enumerated type. */
13671
13672 static inline int
13673 type_is_enum (const_tree type)
13674 {
13675 return TREE_CODE (type) == ENUMERAL_TYPE;
13676 }
13677
13678 /* Return the DBX register number described by a given RTL node. */
13679
13680 static unsigned int
13681 dbx_reg_number (const_rtx rtl)
13682 {
13683 unsigned regno = REGNO (rtl);
13684
13685 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13686
13687 #ifdef LEAF_REG_REMAP
13688 if (crtl->uses_only_leaf_regs)
13689 {
13690 int leaf_reg = LEAF_REG_REMAP (regno);
13691 if (leaf_reg != -1)
13692 regno = (unsigned) leaf_reg;
13693 }
13694 #endif
13695
13696 regno = DBX_REGISTER_NUMBER (regno);
13697 gcc_assert (regno != INVALID_REGNUM);
13698 return regno;
13699 }
13700
13701 /* Optionally add a DW_OP_piece term to a location description expression.
13702 DW_OP_piece is only added if the location description expression doesn't
13703 already end with DW_OP_piece. */
13704
13705 static void
13706 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13707 {
13708 dw_loc_descr_ref loc;
13709
13710 if (*list_head != NULL)
13711 {
13712 /* Find the end of the chain. */
13713 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13714 ;
13715
13716 if (loc->dw_loc_opc != DW_OP_piece)
13717 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13718 }
13719 }
13720
13721 /* Return a location descriptor that designates a machine register or
13722 zero if there is none. */
13723
13724 static dw_loc_descr_ref
13725 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13726 {
13727 rtx regs;
13728
13729 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13730 return 0;
13731
13732 /* We only use "frame base" when we're sure we're talking about the
13733 post-prologue local stack frame. We do this by *not* running
13734 register elimination until this point, and recognizing the special
13735 argument pointer and soft frame pointer rtx's.
13736 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13737 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13738 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13739 {
13740 dw_loc_descr_ref result = NULL;
13741
13742 if (dwarf_version >= 4 || !dwarf_strict)
13743 {
13744 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13745 initialized);
13746 if (result)
13747 add_loc_descr (&result,
13748 new_loc_descr (DW_OP_stack_value, 0, 0));
13749 }
13750 return result;
13751 }
13752
13753 regs = targetm.dwarf_register_span (rtl);
13754
13755 if (REG_NREGS (rtl) > 1 || regs)
13756 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13757 else
13758 {
13759 unsigned int dbx_regnum = dbx_reg_number (rtl);
13760 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13761 return 0;
13762 return one_reg_loc_descriptor (dbx_regnum, initialized);
13763 }
13764 }
13765
13766 /* Return a location descriptor that designates a machine register for
13767 a given hard register number. */
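 /* DW_OP_reg0 through DW_OP_reg31 encode the register number in the
    opcode itself (one byte); larger numbers need DW_OP_regx followed by
    a ULEB128 operand.  */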
13768
13769 static dw_loc_descr_ref
13770 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13771 {
13772 dw_loc_descr_ref reg_loc_descr;
13773
13774 if (regno <= 31)
13775 reg_loc_descr
13776 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13777 else
13778 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13779
13780 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13781 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13782
13783 return reg_loc_descr;
13784 }
13785
13786 /* Given an RTL of a register, return a location descriptor that
13787 designates a value that spans more than one register. */
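 /* E.g. a 16-byte value living in two 8-byte registers becomes
    DW_OP_regX, DW_OP_piece 8, DW_OP_regY, DW_OP_piece 8, each
    DW_OP_piece giving the size in bytes taken from the preceding
    location.  */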
13788
13789 static dw_loc_descr_ref
13790 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13791 enum var_init_status initialized)
13792 {
13793 int size, i;
13794 dw_loc_descr_ref loc_result = NULL;
13795
13796 /* Simple, contiguous registers. */
13797 if (regs == NULL_RTX)
13798 {
13799 unsigned reg = REGNO (rtl);
13800 int nregs;
13801
13802 #ifdef LEAF_REG_REMAP
13803 if (crtl->uses_only_leaf_regs)
13804 {
13805 int leaf_reg = LEAF_REG_REMAP (reg);
13806 if (leaf_reg != -1)
13807 reg = (unsigned) leaf_reg;
13808 }
13809 #endif
13810
13811 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13812 nregs = REG_NREGS (rtl);
13813
13814 /* At present we only track constant-sized pieces. */
13815 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13816 return NULL;
13817 size /= nregs;
13818
13819 loc_result = NULL;
13820 while (nregs--)
13821 {
13822 dw_loc_descr_ref t;
13823
13824 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13825 VAR_INIT_STATUS_INITIALIZED);
13826 add_loc_descr (&loc_result, t);
13827 add_loc_descr_op_piece (&loc_result, size);
13828 ++reg;
13829 }
13830 return loc_result;
13831 }
13832
13833 /* Now onto stupid register sets in non-contiguous locations. */
13834
13835 gcc_assert (GET_CODE (regs) == PARALLEL);
13836
13837 /* At present we only track constant-sized pieces. */
13838 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13839 return NULL;
13840 loc_result = NULL;
13841
13842 for (i = 0; i < XVECLEN (regs, 0); ++i)
13843 {
13844 dw_loc_descr_ref t;
13845
13846 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13847 VAR_INIT_STATUS_INITIALIZED);
13848 add_loc_descr (&loc_result, t);
13849 add_loc_descr_op_piece (&loc_result, size);
13850 }
13851
13852 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13853 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13854 return loc_result;
13855 }
13856
13857 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13858
13859 /* Return a location descriptor that designates a constant i,
13860 as a compound operation from constant (i >> shift), constant shift
13861 and DW_OP_shl. */
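/* For example, 0x80000000 (== 16 << 27) can be rebuilt from two one-byte
   literal opcodes and DW_OP_shl (e.g. DW_OP_lit16, DW_OP_lit27,
   DW_OP_shl, 3 bytes in all) instead of a 5-byte DW_OP_const4u.  */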
13862
13863 static dw_loc_descr_ref
13864 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13865 {
13866 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13867 add_loc_descr (&ret, int_loc_descriptor (shift));
13868 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13869 return ret;
13870 }
13871
13872 /* Return a location descriptor that designates constant POLY_I. */
13873
13874 static dw_loc_descr_ref
13875 int_loc_descriptor (poly_int64 poly_i)
13876 {
13877 enum dwarf_location_atom op;
13878
13879 HOST_WIDE_INT i;
13880 if (!poly_i.is_constant (&i))
13881 {
13882 /* Create location descriptions for the non-constant part and
13883 add any constant offset at the end. */
13884 dw_loc_descr_ref ret = NULL;
13885 HOST_WIDE_INT constant = poly_i.coeffs[0];
13886 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13887 {
13888 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13889 if (coeff != 0)
13890 {
13891 dw_loc_descr_ref start = ret;
13892 unsigned int factor;
13893 int bias;
13894 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13895 (j, &factor, &bias);
13896
13897 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13898 add COEFF * (REGNO / FACTOR) now and subtract
13899 COEFF * BIAS from the final constant part. */
13900 constant -= coeff * bias;
13901 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13902 if (coeff % factor == 0)
13903 coeff /= factor;
13904 else
13905 {
13906 int amount = exact_log2 (factor);
13907 gcc_assert (amount >= 0);
13908 add_loc_descr (&ret, int_loc_descriptor (amount));
13909 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13910 }
13911 if (coeff != 1)
13912 {
13913 add_loc_descr (&ret, int_loc_descriptor (coeff));
13914 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13915 }
13916 if (start)
13917 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13918 }
13919 }
13920 loc_descr_plus_const (&ret, constant);
13921 return ret;
13922 }
13923
13924 /* Pick the smallest representation of a constant, rather than just
13925 defaulting to the LEB encoding. */
13926 if (i >= 0)
13927 {
13928 int clz = clz_hwi (i);
13929 int ctz = ctz_hwi (i);
13930 if (i <= 31)
13931 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13932 else if (i <= 0xff)
13933 op = DW_OP_const1u;
13934 else if (i <= 0xffff)
13935 op = DW_OP_const2u;
13936 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13937 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13938 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13939 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13940 while DW_OP_const4u is 5 bytes. */
13941 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13942 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13943 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13944 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13945 while DW_OP_const4u is 5 bytes. */
13946 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13947
13948 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13949 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13950 <= 4)
13951 {
13952 /* As i >= 2**31, the double cast above will yield a negative number.
13953 Since wrapping is defined in DWARF expressions we can output big
13954 positive integers as small negative ones, regardless of the size
13955 of host wide ints.
13956
13957 Here, since the evaluator will handle 32-bit values and since i >=
13958 2**31, we know it's going to be interpreted as a negative literal:
13959 store it this way if we can do better than 5 bytes this way. */
13960 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13961 }
13962 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13963 op = DW_OP_const4u;
13964
13965 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13966 least 6 bytes: see if we can do better before falling back to it. */
13967 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13968 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13969 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13970 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13971 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13972 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13973 >= HOST_BITS_PER_WIDE_INT)
13974 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13975 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13976 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13977 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13978 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13979 && size_of_uleb128 (i) > 6)
13980 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13981 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13982 else
13983 op = DW_OP_constu;
13984 }
13985 else
13986 {
13987 if (i >= -0x80)
13988 op = DW_OP_const1s;
13989 else if (i >= -0x8000)
13990 op = DW_OP_const2s;
13991 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13992 {
13993 if (size_of_int_loc_descriptor (i) < 5)
13994 {
13995 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13996 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13997 return ret;
13998 }
13999 op = DW_OP_const4s;
14000 }
14001 else
14002 {
14003 if (size_of_int_loc_descriptor (i)
14004 < (unsigned long) 1 + size_of_sleb128 (i))
14005 {
14006 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14007 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14008 return ret;
14009 }
14010 op = DW_OP_consts;
14011 }
14012 }
14013
14014 return new_loc_descr (op, i, 0);
14015 }
14016
14017 /* Likewise, for unsigned constants. */
14018
14019 static dw_loc_descr_ref
14020 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14021 {
14022 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14023 const unsigned HOST_WIDE_INT max_uint
14024 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14025
14026 /* If possible, use the clever signed constants handling. */
14027 if (i <= max_int)
14028 return int_loc_descriptor ((HOST_WIDE_INT) i);
14029
14030 /* Here, we are left with positive numbers that cannot be represented as
14031 HOST_WIDE_INT, i.e.:
14032 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14033
14034 Using a DW_OP_const4u/const8u operation to encode them consumes a lot of
14035 bytes, whereas it may be better to output a negative integer: thanks to
14036 integer wrapping, we know that:
14037 x = x - 2 ** (DWARF2_ADDR_SIZE * 8)
14038 = x - 2 * (max (HOST_WIDE_INT) + 1)
14039 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
14040 small negative integers. Let's try that in cases where it will clearly
14041 improve the encoding: there is no gain turning DW_OP_const4u into
14042 DW_OP_const4s. */
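  /* For instance, with 8-byte addresses and a 64-bit HOST_WIDE_INT,
     0xffffffffffffffff wraps to second_shift == -1 below and is emitted
     as a 2-byte DW_OP_const1s instead of a 9-byte DW_OP_const8u.  */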
14043 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14044 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14045 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14046 {
14047 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14048
14049 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14050 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14051 const HOST_WIDE_INT second_shift
14052 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14053
14054 /* So we finally have:
14055 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14056 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14057 return int_loc_descriptor (second_shift);
14058 }
14059
14060 /* Last chance: fallback to a simple constant operation. */
14061 return new_loc_descr
14062 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14063 ? DW_OP_const4u
14064 : DW_OP_const8u,
14065 i, 0);
14066 }
14067
14068 /* Generate and return a location description that computes the unsigned
14069 comparison of the two stack top entries (a OP b where b is the top-most
14070 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14071 LE_EXPR, GT_EXPR or GE_EXPR. */
14072
14073 static dw_loc_descr_ref
14074 uint_comparison_loc_list (enum tree_code kind)
14075 {
14076 enum dwarf_location_atom op, flip_op;
14077 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14078
14079 switch (kind)
14080 {
14081 case LT_EXPR:
14082 op = DW_OP_lt;
14083 break;
14084 case LE_EXPR:
14085 op = DW_OP_le;
14086 break;
14087 case GT_EXPR:
14088 op = DW_OP_gt;
14089 break;
14090 case GE_EXPR:
14091 op = DW_OP_ge;
14092 break;
14093 default:
14094 gcc_unreachable ();
14095 }
14096
14097 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14098 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14099
14100 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14101 possible to perform unsigned comparisons: we just have to distinguish
14102 two cases:
14103
14104 1. when a and b have the same sign (as signed integers); then we should
14105 return: a OP(signed) b;
14106
14107 2. when a is a negative signed integer while b is a positive one, then a
14108 is a greater unsigned integer than b; likewise when a and b's roles
14109 are flipped.
14110
14111 So first, compare the sign of the two operands. */
14112 ret = new_loc_descr (DW_OP_over, 0, 0);
14113 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14114 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14115 /* If they have different signs (i.e. they have different sign bits), then
14116 the stack top value now has the sign bit set and is thus smaller than
14117 zero. */
14118 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14119 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14120 add_loc_descr (&ret, bra_node);
14121
14122 /* We are in case 1. At this point, we know both operands have the same
14123 sign, so it's safe to use the built-in signed comparison. */
14124 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14125 add_loc_descr (&ret, jmp_node);
14126
14127 /* We are in case 2. Here, we know both operands do not have the same sign,
14128 so we have to flip the signed comparison. */
14129 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14130 tmp = new_loc_descr (flip_op, 0, 0);
14131 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14132 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14133 add_loc_descr (&ret, tmp);
14134
14135 /* This dummy operation is necessary to make the two branches join. */
14136 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14137 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14138 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14139 add_loc_descr (&ret, tmp);
14140
14141 return ret;
14142 }
14143
14144 /* Likewise, but takes the location description lists (might be destructive on
14145 them). Return NULL if either is NULL or if concatenation fails. */
14146
14147 static dw_loc_list_ref
14148 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14149 enum tree_code kind)
14150 {
14151 if (left == NULL || right == NULL)
14152 return NULL;
14153
14154 add_loc_list (&left, right);
14155 if (left == NULL)
14156 return NULL;
14157
14158 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14159 return left;
14160 }
14161
14162 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14163 without actually allocating it. */
14164
14165 static unsigned long
14166 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14167 {
14168 return size_of_int_loc_descriptor (i >> shift)
14169 + size_of_int_loc_descriptor (shift)
14170 + 1;
14171 }
14172
14173 /* Return size_of_locs (int_loc_descriptor (i)) without
14174 actually allocating it. */
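/* Illustrative sizes: 7 -> 1 (DW_OP_lit7), 0x80 -> 2 (DW_OP_const1u),
   0x1234 -> 3 (DW_OP_const2u), -2 -> 2 (DW_OP_const1s). */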
14175
14176 static unsigned long
14177 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14178 {
14179 unsigned long s;
14180
14181 if (i >= 0)
14182 {
14183 int clz, ctz;
14184 if (i <= 31)
14185 return 1;
14186 else if (i <= 0xff)
14187 return 2;
14188 else if (i <= 0xffff)
14189 return 3;
14190 clz = clz_hwi (i);
14191 ctz = ctz_hwi (i);
14192 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14193 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14194 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14195 - clz - 5);
14196 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14197 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14198 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14199 - clz - 8);
14200 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14201 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14202 <= 4)
14203 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14204 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14205 return 5;
14206 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14207 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14208 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14209 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14210 - clz - 8);
14211 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14212 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14213 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14214 - clz - 16);
14215 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14216 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14217 && s > 6)
14218 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14219 - clz - 32);
14220 else
14221 return 1 + s;
14222 }
14223 else
14224 {
14225 if (i >= -0x80)
14226 return 2;
14227 else if (i >= -0x8000)
14228 return 3;
14229 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14230 {
14231 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14232 {
14233 s = size_of_int_loc_descriptor (-i) + 1;
14234 if (s < 5)
14235 return s;
14236 }
14237 return 5;
14238 }
14239 else
14240 {
14241 unsigned long r = 1 + size_of_sleb128 (i);
14242 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14243 {
14244 s = size_of_int_loc_descriptor (-i) + 1;
14245 if (s < r)
14246 return s;
14247 }
14248 return r;
14249 }
14250 }
14251 }
14252
14253 /* Return a location description representing the "address" of an integer
14254 value. This can appear only as a toplevel expression. */
14255
14256 static dw_loc_descr_ref
14257 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14258 {
14259 int litsize;
14260 dw_loc_descr_ref loc_result = NULL;
14261
14262 if (!(dwarf_version >= 4 || !dwarf_strict))
14263 return NULL;
14264
14265 litsize = size_of_int_loc_descriptor (i);
14266 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14267 is more compact. For DW_OP_stack_value we need:
14268 litsize + 1 (DW_OP_stack_value)
14269 and for DW_OP_implicit_value:
14270 1 (DW_OP_implicit_value) + 1 (length) + size. */
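/* E.g. (illustrative) for i == 5 and size == 4: litsize == 1 (DW_OP_lit5),
   so 1 + 1 == 2 bytes beats 1 + 1 + 4 == 6 bytes and DW_OP_lit5
   DW_OP_stack_value is emitted (assuming DWARF2_ADDR_SIZE >= 4). */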
14271 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14272 {
14273 loc_result = int_loc_descriptor (i);
14274 add_loc_descr (&loc_result,
14275 new_loc_descr (DW_OP_stack_value, 0, 0));
14276 return loc_result;
14277 }
14278
14279 loc_result = new_loc_descr (DW_OP_implicit_value,
14280 size, 0);
14281 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14282 loc_result->dw_loc_oprnd2.v.val_int = i;
14283 return loc_result;
14284 }
14285
14286 /* Return a location descriptor that designates a base+offset location. */
14287
14288 static dw_loc_descr_ref
14289 based_loc_descr (rtx reg, poly_int64 offset,
14290 enum var_init_status initialized)
14291 {
14292 unsigned int regno;
14293 dw_loc_descr_ref result;
14294 dw_fde_ref fde = cfun->fde;
14295
14296 /* We only use "frame base" when we're sure we're talking about the
14297 post-prologue local stack frame. We do this by *not* running
14298 register elimination until this point, and recognizing the special
14299 argument pointer and soft frame pointer rtx's. */
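/* Illustrative outcome: a local at frame offset -16 is then described as
   DW_OP_fbreg -16, relative to the frame base selected for the FDE. */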
14300 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14301 {
14302 rtx elim = (ira_use_lra_p
14303 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14304 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14305
14306 if (elim != reg)
14307 {
14308 elim = strip_offset_and_add (elim, &offset);
14309 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
14310 && (elim == hard_frame_pointer_rtx
14311 || elim == stack_pointer_rtx))
14312 || elim == (frame_pointer_needed
14313 ? hard_frame_pointer_rtx
14314 : stack_pointer_rtx));
14315
14316 /* If drap register is used to align stack, use frame
14317 pointer + offset to access stack variables. If stack
14318 is aligned without drap, use stack pointer + offset to
14319 access stack variables. */
14320 if (crtl->stack_realign_tried
14321 && reg == frame_pointer_rtx)
14322 {
14323 int base_reg
14324 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14325 ? HARD_FRAME_POINTER_REGNUM
14326 : REGNO (elim));
14327 return new_reg_loc_descr (base_reg, offset);
14328 }
14329
14330 gcc_assert (frame_pointer_fb_offset_valid);
14331 offset += frame_pointer_fb_offset;
14332 HOST_WIDE_INT const_offset;
14333 if (offset.is_constant (&const_offset))
14334 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14335 else
14336 {
14337 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14338 loc_descr_plus_const (&ret, offset);
14339 return ret;
14340 }
14341 }
14342 }
14343
14344 regno = REGNO (reg);
14345 #ifdef LEAF_REG_REMAP
14346 if (crtl->uses_only_leaf_regs)
14347 {
14348 int leaf_reg = LEAF_REG_REMAP (regno);
14349 if (leaf_reg != -1)
14350 regno = (unsigned) leaf_reg;
14351 }
14352 #endif
14353 regno = DWARF_FRAME_REGNUM (regno);
14354
14355 HOST_WIDE_INT const_offset;
14356 if (!optimize && fde
14357 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14358 && offset.is_constant (&const_offset))
14359 {
14360 /* Use cfa+offset to represent the location of arguments passed
14361 on the stack when drap is used to align stack.
14362 Only do this when not optimizing; for optimized code, var-tracking
14363 is supposed to track where the arguments live, and the register
14364 used as vdrap or drap in some spot might be used for something
14365 else in other parts of the routine. */
14366 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14367 }
14368
14369 result = new_reg_loc_descr (regno, offset);
14370
14371 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14372 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14373
14374 return result;
14375 }
14376
14377 /* Return true if this RTL expression describes a base+offset calculation. */
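/* That is, it matches (plus (reg <hard register>) (const_int <offset>)). */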
14378
14379 static inline int
14380 is_based_loc (const_rtx rtl)
14381 {
14382 return (GET_CODE (rtl) == PLUS
14383 && ((REG_P (XEXP (rtl, 0))
14384 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14385 && CONST_INT_P (XEXP (rtl, 1)))));
14386 }
14387
14388 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14389 failed. */
14390
14391 static dw_loc_descr_ref
14392 tls_mem_loc_descriptor (rtx mem)
14393 {
14394 tree base;
14395 dw_loc_descr_ref loc_result;
14396
14397 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14398 return NULL;
14399
14400 base = get_base_address (MEM_EXPR (mem));
14401 if (base == NULL
14402 || !VAR_P (base)
14403 || !DECL_THREAD_LOCAL_P (base))
14404 return NULL;
14405
14406 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14407 if (loc_result == NULL)
14408 return NULL;
14409
14410 if (maybe_ne (MEM_OFFSET (mem), 0))
14411 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14412
14413 return loc_result;
14414 }
14415
14416 /* Output debug info about the reason why we failed to expand an expression
14417 as a DWARF expression. */
14418
14419 static void
14420 expansion_failed (tree expr, rtx rtl, char const *reason)
14421 {
14422 if (dump_file && (dump_flags & TDF_DETAILS))
14423 {
14424 fprintf (dump_file, "Failed to expand as dwarf: ");
14425 if (expr)
14426 print_generic_expr (dump_file, expr, dump_flags);
14427 if (rtl)
14428 {
14429 fprintf (dump_file, "\n");
14430 print_rtl (dump_file, rtl);
14431 }
14432 fprintf (dump_file, "\nReason: %s\n", reason);
14433 }
14434 }
14435
14436 /* Helper function for const_ok_for_output. */
14437
14438 static bool
14439 const_ok_for_output_1 (rtx rtl)
14440 {
14441 if (targetm.const_not_ok_for_debug_p (rtl))
14442 {
14443 if (GET_CODE (rtl) != UNSPEC)
14444 {
14445 expansion_failed (NULL_TREE, rtl,
14446 "Expression rejected for debug by the backend.\n");
14447 return false;
14448 }
14449
14450 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14451 the target hook doesn't explicitly allow it in debug info, assume
14452 we can't express it in the debug info. */
14453 /* Don't complain about TLS UNSPECs, those are just too hard to
14454 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14455 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14456 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14457 if (flag_checking
14458 && (XVECLEN (rtl, 0) == 0
14459 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14460 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14461 inform (current_function_decl
14462 ? DECL_SOURCE_LOCATION (current_function_decl)
14463 : UNKNOWN_LOCATION,
14464 #if NUM_UNSPEC_VALUES > 0
14465 "non-delegitimized UNSPEC %s (%d) found in variable location",
14466 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14467 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14468 XINT (rtl, 1));
14469 #else
14470 "non-delegitimized UNSPEC %d found in variable location",
14471 XINT (rtl, 1));
14472 #endif
14473 expansion_failed (NULL_TREE, rtl,
14474 "UNSPEC hasn't been delegitimized.\n");
14475 return false;
14476 }
14477
14478 if (CONST_POLY_INT_P (rtl))
14479 return false;
14480
14481 if (targetm.const_not_ok_for_debug_p (rtl))
14482 {
14483 expansion_failed (NULL_TREE, rtl,
14484 "Expression rejected for debug by the backend.\n");
14485 return false;
14486 }
14487
14488 /* FIXME: Refer to PR60655. It is possible for simplification
14489 of rtl expressions in var tracking to produce such expressions.
14490 We should really identify / validate expressions
14491 enclosed in CONST that can be handled by assemblers on various
14492 targets and only handle legitimate cases here. */
14493 switch (GET_CODE (rtl))
14494 {
14495 case SYMBOL_REF:
14496 break;
14497 case NOT:
14498 case NEG:
14499 return false;
14500 default:
14501 return true;
14502 }
14503
14504 if (CONSTANT_POOL_ADDRESS_P (rtl))
14505 {
14506 bool marked;
14507 get_pool_constant_mark (rtl, &marked);
14508 /* If all references to this pool constant were optimized away,
14509 it was not output and thus we can't represent it. */
14510 if (!marked)
14511 {
14512 expansion_failed (NULL_TREE, rtl,
14513 "Constant was removed from constant pool.\n");
14514 return false;
14515 }
14516 }
14517
14518 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14519 return false;
14520
14521 /* Avoid references to external symbols in debug info: on several targets
14522 the linker might even refuse to link when linking a shared library,
14523 and in many other cases the relocations for .debug_info/.debug_loc are
14524 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
14525 to be defined within the same shared library or executable, are fine. */
14526 if (SYMBOL_REF_EXTERNAL_P (rtl))
14527 {
14528 tree decl = SYMBOL_REF_DECL (rtl);
14529
14530 if (decl == NULL || !targetm.binds_local_p (decl))
14531 {
14532 expansion_failed (NULL_TREE, rtl,
14533 "Symbol not defined in current TU.\n");
14534 return false;
14535 }
14536 }
14537
14538 return true;
14539 }
14540
14541 /* Return true if constant RTL can be emitted in DW_OP_addr or
14542 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14543 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14544
14545 static bool
14546 const_ok_for_output (rtx rtl)
14547 {
14548 if (GET_CODE (rtl) == SYMBOL_REF)
14549 return const_ok_for_output_1 (rtl);
14550
14551 if (GET_CODE (rtl) == CONST)
14552 {
14553 subrtx_var_iterator::array_type array;
14554 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14555 if (!const_ok_for_output_1 (*iter))
14556 return false;
14557 return true;
14558 }
14559
14560 return true;
14561 }
14562
14563 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14564 if possible, NULL otherwise. */
14565
14566 static dw_die_ref
14567 base_type_for_mode (machine_mode mode, bool unsignedp)
14568 {
14569 dw_die_ref type_die;
14570 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14571
14572 if (type == NULL)
14573 return NULL;
14574 switch (TREE_CODE (type))
14575 {
14576 case INTEGER_TYPE:
14577 case REAL_TYPE:
14578 break;
14579 default:
14580 return NULL;
14581 }
14582 type_die = lookup_type_die (type);
14583 if (!type_die)
14584 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14585 comp_unit_die ());
14586 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14587 return NULL;
14588 return type_die;
14589 }
14590
14591 /* For the descriptor OP, assumed to be in unsigned MODE, convert it to an
14592 unsigned type matching MODE, or, if MODE is narrower than or as wide as
14593 DWARF2_ADDR_SIZE, to an untyped value. Return NULL if the conversion is
14594 not possible. */
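/* Illustrative: for a mode wider than DWARF2_ADDR_SIZE this appends
   DW_OP_convert <base type DIE for MODE>; for narrower-or-equal modes it
   appends DW_OP_convert with operand 0, i.e. a conversion back to the
   generic (untyped) stack value. */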
14595
14596 static dw_loc_descr_ref
14597 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14598 {
14599 machine_mode outer_mode = mode;
14600 dw_die_ref type_die;
14601 dw_loc_descr_ref cvt;
14602
14603 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14604 {
14605 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14606 return op;
14607 }
14608 type_die = base_type_for_mode (outer_mode, 1);
14609 if (type_die == NULL)
14610 return NULL;
14611 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14612 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14613 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14614 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14615 add_loc_descr (&op, cvt);
14616 return op;
14617 }
14618
14619 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14620
14621 static dw_loc_descr_ref
14622 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14623 dw_loc_descr_ref op1)
14624 {
14625 dw_loc_descr_ref ret = op0;
14626 add_loc_descr (&ret, op1);
14627 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
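/* The DWARF comparison pushes 0 or 1; if the target's STORE_FLAG_VALUE
   differs from 1, scale the result so it matches that convention. */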
14628 if (STORE_FLAG_VALUE != 1)
14629 {
14630 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14631 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14632 }
14633 return ret;
14634 }
14635
14636 /* Subroutine of scompare_loc_descriptor for the case in which we're
14637 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14638 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14639
14640 static dw_loc_descr_ref
14641 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14642 scalar_int_mode op_mode,
14643 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14644 {
14645 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14646 dw_loc_descr_ref cvt;
14647
14648 if (type_die == NULL)
14649 return NULL;
14650 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14651 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14652 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14653 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14654 add_loc_descr (&op0, cvt);
14655 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14656 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14657 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14658 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14659 add_loc_descr (&op1, cvt);
14660 return compare_loc_descriptor (op, op0, op1);
14661 }
14662
14663 /* Subroutine of scompare_loc_descriptor for the case in which we're
14664 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14665 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14666
14667 static dw_loc_descr_ref
14668 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14669 scalar_int_mode op_mode,
14670 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14671 {
14672 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14673 /* For eq/ne, if the operands are known to be zero-extended,
14674 there is no need to do the fancy shifting up. */
14675 if (op == DW_OP_eq || op == DW_OP_ne)
14676 {
14677 dw_loc_descr_ref last0, last1;
14678 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14679 ;
14680 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14681 ;
14682 /* deref_size zero extends, and for constants we can check
14683 whether they are zero extended or not. */
14684 if (((last0->dw_loc_opc == DW_OP_deref_size
14685 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14686 || (CONST_INT_P (XEXP (rtl, 0))
14687 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14688 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14689 && ((last1->dw_loc_opc == DW_OP_deref_size
14690 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14691 || (CONST_INT_P (XEXP (rtl, 1))
14692 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14693 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14694 return compare_loc_descriptor (op, op0, op1);
14695
14696 /* EQ/NE comparison against constant in narrower type than
14697 DWARF2_ADDR_SIZE can be performed either as
14698 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14699 DW_OP_{eq,ne}
14700 or
14701 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14702 DW_OP_{eq,ne}. Pick whatever is shorter. */
14703 if (CONST_INT_P (XEXP (rtl, 1))
14704 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14705 && (size_of_int_loc_descriptor (shift) + 1
14706 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14707 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14708 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14709 & GET_MODE_MASK (op_mode))))
14710 {
14711 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14712 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14713 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14714 & GET_MODE_MASK (op_mode));
14715 return compare_loc_descriptor (op, op0, op1);
14716 }
14717 }
14718 add_loc_descr (&op0, int_loc_descriptor (shift));
14719 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14720 if (CONST_INT_P (XEXP (rtl, 1)))
14721 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14722 else
14723 {
14724 add_loc_descr (&op1, int_loc_descriptor (shift));
14725 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14726 }
14727 return compare_loc_descriptor (op, op0, op1);
14728 }
14729
14730 /* Return location descriptor for signed comparison OP RTL. */
14731
14732 static dw_loc_descr_ref
14733 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14734 machine_mode mem_mode)
14735 {
14736 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14737 dw_loc_descr_ref op0, op1;
14738
14739 if (op_mode == VOIDmode)
14740 op_mode = GET_MODE (XEXP (rtl, 1));
14741 if (op_mode == VOIDmode)
14742 return NULL;
14743
14744 scalar_int_mode int_op_mode;
14745 if (dwarf_strict
14746 && dwarf_version < 5
14747 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14748 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14749 return NULL;
14750
14751 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14752 VAR_INIT_STATUS_INITIALIZED);
14753 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14754 VAR_INIT_STATUS_INITIALIZED);
14755
14756 if (op0 == NULL || op1 == NULL)
14757 return NULL;
14758
14759 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14760 {
14761 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14762 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14763
14764 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14765 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14766 }
14767 return compare_loc_descriptor (op, op0, op1);
14768 }
14769
14770 /* Return location descriptor for unsigned comparison OP RTL. */
14771
14772 static dw_loc_descr_ref
14773 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14774 machine_mode mem_mode)
14775 {
14776 dw_loc_descr_ref op0, op1;
14777
14778 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14779 if (test_op_mode == VOIDmode)
14780 test_op_mode = GET_MODE (XEXP (rtl, 1));
14781
14782 scalar_int_mode op_mode;
14783 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14784 return NULL;
14785
14786 if (dwarf_strict
14787 && dwarf_version < 5
14788 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14789 return NULL;
14790
14791 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14792 VAR_INIT_STATUS_INITIALIZED);
14793 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14794 VAR_INIT_STATUS_INITIALIZED);
14795
14796 if (op0 == NULL || op1 == NULL)
14797 return NULL;
14798
14799 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14800 {
14801 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14802 dw_loc_descr_ref last0, last1;
14803 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14804 ;
14805 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14806 ;
14807 if (CONST_INT_P (XEXP (rtl, 0)))
14808 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14809 /* deref_size zero extends, so no need to mask it again. */
14810 else if (last0->dw_loc_opc != DW_OP_deref_size
14811 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14812 {
14813 add_loc_descr (&op0, int_loc_descriptor (mask));
14814 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14815 }
14816 if (CONST_INT_P (XEXP (rtl, 1)))
14817 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14818 /* deref_size zero extends, so no need to mask it again. */
14819 else if (last1->dw_loc_opc != DW_OP_deref_size
14820 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14821 {
14822 add_loc_descr (&op1, int_loc_descriptor (mask));
14823 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14824 }
14825 }
14826 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14827 {
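/* Bias both operands by 2**(N-1), i.e. flip their sign bits; the unsigned
   comparison is then equivalent to a signed comparison of the biased
   values. */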
14828 HOST_WIDE_INT bias = 1;
14829 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14830 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14831 if (CONST_INT_P (XEXP (rtl, 1)))
14832 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14833 + INTVAL (XEXP (rtl, 1)));
14834 else
14835 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14836 bias, 0));
14837 }
14838 return compare_loc_descriptor (op, op0, op1);
14839 }
14840
14841 /* Return location descriptor for {U,S}{MIN,MAX}. */
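/* Illustrative core sequence (derived from the code below) for SMIN of two
   DWARF2_ADDR_SIZE-wide operands:
     <a> DW_OP_dup <b> DW_OP_swap DW_OP_over DW_OP_lt DW_OP_bra <L1>
     DW_OP_swap
     L1: DW_OP_drop
   i.e. keep both values on the stack, compare them, and drop the one that
   loses. */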
14842
14843 static dw_loc_descr_ref
14844 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14845 machine_mode mem_mode)
14846 {
14847 enum dwarf_location_atom op;
14848 dw_loc_descr_ref op0, op1, ret;
14849 dw_loc_descr_ref bra_node, drop_node;
14850
14851 scalar_int_mode int_mode;
14852 if (dwarf_strict
14853 && dwarf_version < 5
14854 && (!is_a <scalar_int_mode> (mode, &int_mode)
14855 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14856 return NULL;
14857
14858 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14859 VAR_INIT_STATUS_INITIALIZED);
14860 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14861 VAR_INIT_STATUS_INITIALIZED);
14862
14863 if (op0 == NULL || op1 == NULL)
14864 return NULL;
14865
14866 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14867 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14868 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14869 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14870 {
14871 /* Checked by the caller. */
14872 int_mode = as_a <scalar_int_mode> (mode);
14873 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14874 {
14875 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14876 add_loc_descr (&op0, int_loc_descriptor (mask));
14877 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14878 add_loc_descr (&op1, int_loc_descriptor (mask));
14879 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14880 }
14881 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14882 {
14883 HOST_WIDE_INT bias = 1;
14884 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14885 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14886 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14887 }
14888 }
14889 else if (is_a <scalar_int_mode> (mode, &int_mode)
14890 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14891 {
14892 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14893 add_loc_descr (&op0, int_loc_descriptor (shift));
14894 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14895 add_loc_descr (&op1, int_loc_descriptor (shift));
14896 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14897 }
14898 else if (is_a <scalar_int_mode> (mode, &int_mode)
14899 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14900 {
14901 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14902 dw_loc_descr_ref cvt;
14903 if (type_die == NULL)
14904 return NULL;
14905 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14906 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14907 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14908 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14909 add_loc_descr (&op0, cvt);
14910 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14911 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14912 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14913 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14914 add_loc_descr (&op1, cvt);
14915 }
14916
14917 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14918 op = DW_OP_lt;
14919 else
14920 op = DW_OP_gt;
14921 ret = op0;
14922 add_loc_descr (&ret, op1);
14923 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14924 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14925 add_loc_descr (&ret, bra_node);
14926 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14927 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14928 add_loc_descr (&ret, drop_node);
14929 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14930 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14931 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14932 && is_a <scalar_int_mode> (mode, &int_mode)
14933 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14934 ret = convert_descriptor_to_mode (int_mode, ret);
14935 return ret;
14936 }
14937
14938 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
14939 after converting both arguments to TYPE_DIE, then convert the result
14940 back to unsigned. */
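/* Illustrative shape of the result for a wider-than-address mode:
     <op0> DW_OP_convert <TYPE_DIE> <op1> DW_OP_convert <TYPE_DIE> <OP>
     DW_OP_convert <unsigned base type for MODE>
   where the trailing conversion is supplied by convert_descriptor_to_mode. */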
14941
14942 static dw_loc_descr_ref
14943 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14944 scalar_int_mode mode, machine_mode mem_mode)
14945 {
14946 dw_loc_descr_ref cvt, op0, op1;
14947
14948 if (type_die == NULL)
14949 return NULL;
14950 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14951 VAR_INIT_STATUS_INITIALIZED);
14952 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14953 VAR_INIT_STATUS_INITIALIZED);
14954 if (op0 == NULL || op1 == NULL)
14955 return NULL;
14956 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14957 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14958 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14959 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14960 add_loc_descr (&op0, cvt);
14961 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14962 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14963 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14964 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14965 add_loc_descr (&op1, cvt);
14966 add_loc_descr (&op0, op1);
14967 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14968 return convert_descriptor_to_mode (mode, op0);
14969 }
14970
14971 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14972 const0 is DW_OP_lit0 or corresponding typed constant,
14973 const1 is DW_OP_lit1 or corresponding typed constant
14974 and constMSB is constant with just the MSB bit set
14975 for the mode):
14976 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14977 L1: const0 DW_OP_swap
14978 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14979 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14980 L3: DW_OP_drop
14981 L4: DW_OP_nop
14982
14983 CTZ is similar:
14984 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14985 L1: const0 DW_OP_swap
14986 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14987 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14988 L3: DW_OP_drop
14989 L4: DW_OP_nop
14990
14991 FFS is similar:
14992 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14993 L1: const1 DW_OP_swap
14994 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14995 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14996 L3: DW_OP_drop
14997 L4: DW_OP_nop */
14998
14999 static dw_loc_descr_ref
15000 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15001 machine_mode mem_mode)
15002 {
15003 dw_loc_descr_ref op0, ret, tmp;
15004 HOST_WIDE_INT valv;
15005 dw_loc_descr_ref l1jump, l1label;
15006 dw_loc_descr_ref l2jump, l2label;
15007 dw_loc_descr_ref l3jump, l3label;
15008 dw_loc_descr_ref l4jump, l4label;
15009 rtx msb;
15010
15011 if (GET_MODE (XEXP (rtl, 0)) != mode)
15012 return NULL;
15013
15014 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15015 VAR_INIT_STATUS_INITIALIZED);
15016 if (op0 == NULL)
15017 return NULL;
15018 ret = op0;
15019 if (GET_CODE (rtl) == CLZ)
15020 {
15021 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15022 valv = GET_MODE_BITSIZE (mode);
15023 }
15024 else if (GET_CODE (rtl) == FFS)
15025 valv = 0;
15026 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15027 valv = GET_MODE_BITSIZE (mode);
15028 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15029 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15030 add_loc_descr (&ret, l1jump);
15031 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15032 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15033 VAR_INIT_STATUS_INITIALIZED);
15034 if (tmp == NULL)
15035 return NULL;
15036 add_loc_descr (&ret, tmp);
15037 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15038 add_loc_descr (&ret, l4jump);
15039 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15040 ? const1_rtx : const0_rtx,
15041 mode, mem_mode,
15042 VAR_INIT_STATUS_INITIALIZED);
15043 if (l1label == NULL)
15044 return NULL;
15045 add_loc_descr (&ret, l1label);
15046 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15047 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15048 add_loc_descr (&ret, l2label);
15049 if (GET_CODE (rtl) != CLZ)
15050 msb = const1_rtx;
15051 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15052 msb = GEN_INT (HOST_WIDE_INT_1U
15053 << (GET_MODE_BITSIZE (mode) - 1));
15054 else
15055 msb = immed_wide_int_const
15056 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15057 GET_MODE_PRECISION (mode)), mode);
15058 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15059 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15060 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15061 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15062 else
15063 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15064 VAR_INIT_STATUS_INITIALIZED);
15065 if (tmp == NULL)
15066 return NULL;
15067 add_loc_descr (&ret, tmp);
15068 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15069 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15070 add_loc_descr (&ret, l3jump);
15071 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15072 VAR_INIT_STATUS_INITIALIZED);
15073 if (tmp == NULL)
15074 return NULL;
15075 add_loc_descr (&ret, tmp);
15076 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15077 ? DW_OP_shl : DW_OP_shr, 0, 0));
15078 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15079 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15080 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15081 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15082 add_loc_descr (&ret, l2jump);
15083 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15084 add_loc_descr (&ret, l3label);
15085 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15086 add_loc_descr (&ret, l4label);
15087 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15088 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15089 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15090 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15091 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15092 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15093 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15094 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15095 return ret;
15096 }
15097
15098 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15099 const1 is DW_OP_lit1 or corresponding typed constant):
15100 const0 DW_OP_swap
15101 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15102 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15103 L2: DW_OP_drop
15104
15105 PARITY is similar:
15106 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15107 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15108 L2: DW_OP_drop */
15109
15110 static dw_loc_descr_ref
15111 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15112 machine_mode mem_mode)
15113 {
15114 dw_loc_descr_ref op0, ret, tmp;
15115 dw_loc_descr_ref l1jump, l1label;
15116 dw_loc_descr_ref l2jump, l2label;
15117
15118 if (GET_MODE (XEXP (rtl, 0)) != mode)
15119 return NULL;
15120
15121 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15122 VAR_INIT_STATUS_INITIALIZED);
15123 if (op0 == NULL)
15124 return NULL;
15125 ret = op0;
15126 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15127 VAR_INIT_STATUS_INITIALIZED);
15128 if (tmp == NULL)
15129 return NULL;
15130 add_loc_descr (&ret, tmp);
15131 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15132 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15133 add_loc_descr (&ret, l1label);
15134 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15135 add_loc_descr (&ret, l2jump);
15136 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15137 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15138 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15139 VAR_INIT_STATUS_INITIALIZED);
15140 if (tmp == NULL)
15141 return NULL;
15142 add_loc_descr (&ret, tmp);
15143 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15144 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15145 ? DW_OP_plus : DW_OP_xor, 0, 0));
15146 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15147 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15148 VAR_INIT_STATUS_INITIALIZED);
15149 add_loc_descr (&ret, tmp);
15150 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15151 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15152 add_loc_descr (&ret, l1jump);
15153 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15154 add_loc_descr (&ret, l2label);
15155 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15156 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15157 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15158 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15159 return ret;
15160 }
15161
15162 /* BSWAP (constS is initial shift count, either 56 or 24):
15163 constS const0
15164 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15165 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15166 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15167 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15168 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15169
15170 static dw_loc_descr_ref
15171 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15172 machine_mode mem_mode)
15173 {
15174 dw_loc_descr_ref op0, ret, tmp;
15175 dw_loc_descr_ref l1jump, l1label;
15176 dw_loc_descr_ref l2jump, l2label;
15177
15178 if (BITS_PER_UNIT != 8
15179 || (GET_MODE_BITSIZE (mode) != 32
15180 && GET_MODE_BITSIZE (mode) != 64))
15181 return NULL;
15182
15183 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15184 VAR_INIT_STATUS_INITIALIZED);
15185 if (op0 == NULL)
15186 return NULL;
15187
15188 ret = op0;
15189 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15190 mode, mem_mode,
15191 VAR_INIT_STATUS_INITIALIZED);
15192 if (tmp == NULL)
15193 return NULL;
15194 add_loc_descr (&ret, tmp);
15195 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15196 VAR_INIT_STATUS_INITIALIZED);
15197 if (tmp == NULL)
15198 return NULL;
15199 add_loc_descr (&ret, tmp);
15200 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15201 add_loc_descr (&ret, l1label);
15202 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15203 mode, mem_mode,
15204 VAR_INIT_STATUS_INITIALIZED);
15205 add_loc_descr (&ret, tmp);
15206 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15207 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15208 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15209 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15210 VAR_INIT_STATUS_INITIALIZED);
15211 if (tmp == NULL)
15212 return NULL;
15213 add_loc_descr (&ret, tmp);
15214 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15215 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15216 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15217 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15218 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15219 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15220 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15221 VAR_INIT_STATUS_INITIALIZED);
15222 add_loc_descr (&ret, tmp);
15223 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15224 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15225 add_loc_descr (&ret, l2jump);
15226 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15227 VAR_INIT_STATUS_INITIALIZED);
15228 add_loc_descr (&ret, tmp);
15229 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15230 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15231 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15232 add_loc_descr (&ret, l1jump);
15233 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15234 add_loc_descr (&ret, l2label);
15235 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15236 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15237 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15238 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15239 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15240 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15241 return ret;
15242 }
15243
15244 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15245 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15246 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15247 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15248
15249 ROTATERT is similar:
15250 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15251 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15252 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15253
15254 static dw_loc_descr_ref
15255 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15256 machine_mode mem_mode)
15257 {
15258 rtx rtlop1 = XEXP (rtl, 1);
15259 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15260 int i;
15261
15262 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15263 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15264 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15265 VAR_INIT_STATUS_INITIALIZED);
15266 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15267 VAR_INIT_STATUS_INITIALIZED);
15268 if (op0 == NULL || op1 == NULL)
15269 return NULL;
15270 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15271 for (i = 0; i < 2; i++)
15272 {
15273 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15274 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15275 mode, mem_mode,
15276 VAR_INIT_STATUS_INITIALIZED);
15277 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15278 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15279 ? DW_OP_const4u
15280 : HOST_BITS_PER_WIDE_INT == 64
15281 ? DW_OP_const8u : DW_OP_constu,
15282 GET_MODE_MASK (mode), 0);
15283 else
15284 mask[i] = NULL;
15285 if (mask[i] == NULL)
15286 return NULL;
15287 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15288 }
15289 ret = op0;
15290 add_loc_descr (&ret, op1);
15291 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15292 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15293 if (GET_CODE (rtl) == ROTATERT)
15294 {
15295 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15296 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15297 GET_MODE_BITSIZE (mode), 0));
15298 }
15299 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15300 if (mask[0] != NULL)
15301 add_loc_descr (&ret, mask[0]);
15302 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15303 if (mask[1] != NULL)
15304 {
15305 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15306 add_loc_descr (&ret, mask[1]);
15307 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15308 }
15309 if (GET_CODE (rtl) == ROTATE)
15310 {
15311 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15312 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15313 GET_MODE_BITSIZE (mode), 0));
15314 }
15315 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15316 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15317 return ret;
15318 }
15319
15320 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15321 for DEBUG_PARAMETER_REF RTL. */
15322
15323 static dw_loc_descr_ref
15324 parameter_ref_descriptor (rtx rtl)
15325 {
15326 dw_loc_descr_ref ret;
15327 dw_die_ref ref;
15328
15329 if (dwarf_strict)
15330 return NULL;
15331 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15332 /* With LTO during LTRANS we get the late DIE that refers to the early
15333 DIE, thus we add another indirection here. This seems to confuse
15334 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15335 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15336 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15337 if (ref)
15338 {
15339 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15340 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15341 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15342 }
15343 else
15344 {
15345 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15346 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15347 }
15348 return ret;
15349 }
15350
15351 /* The following routine converts the RTL for a variable or parameter
15352 (resident in memory) into an equivalent Dwarf representation of a
15353 mechanism for getting the address of that same variable onto the top of a
15354 hypothetical "address evaluation" stack.
15355
15356 When creating memory location descriptors, we are effectively transforming
15357 the RTL for a memory-resident object into its Dwarf postfix expression
15358 equivalent. This routine recursively descends an RTL tree, turning
15359 it into Dwarf postfix code as it goes.
15360
15361 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15362
15363 MEM_MODE is the mode of the memory reference, needed to handle some
15364 autoincrement addressing modes.
15365
15366 Return 0 if we can't represent the location. */
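/* For example (illustrative, x86_64): the address (plus (reg rbp)
   (const_int 8)) is typically described as DW_OP_breg6 8, while a
   reference through the eliminated frame pointer becomes
   DW_OP_fbreg <offset>. */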
15367
15368 dw_loc_descr_ref
15369 mem_loc_descriptor (rtx rtl, machine_mode mode,
15370 machine_mode mem_mode,
15371 enum var_init_status initialized)
15372 {
15373 dw_loc_descr_ref mem_loc_result = NULL;
15374 enum dwarf_location_atom op;
15375 dw_loc_descr_ref op0, op1;
15376 rtx inner = NULL_RTX;
15377 poly_int64 offset;
15378
15379 if (mode == VOIDmode)
15380 mode = GET_MODE (rtl);
15381
15382 /* Note that for a dynamically sized array, the location we will generate a
15383 description of here will be the lowest numbered location which is
15384 actually within the array. That's *not* necessarily the same as the
15385 zeroth element of the array. */
15386
15387 rtl = targetm.delegitimize_address (rtl);
15388
15389 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15390 return NULL;
15391
15392 scalar_int_mode int_mode, inner_mode, op1_mode;
15393 switch (GET_CODE (rtl))
15394 {
15395 case POST_INC:
15396 case POST_DEC:
15397 case POST_MODIFY:
15398 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15399
15400 case SUBREG:
15401 /* The case of a subreg may arise when we have a local (register)
15402 variable or a formal (register) parameter which doesn't quite fill
15403 up an entire register. For now, just assume that it is
15404 legitimate to make the Dwarf info refer to the whole register which
15405 contains the given subreg. */
15406 if (!subreg_lowpart_p (rtl))
15407 break;
15408 inner = SUBREG_REG (rtl);
15409 /* FALLTHRU */
15410 case TRUNCATE:
15411 if (inner == NULL_RTX)
15412 inner = XEXP (rtl, 0);
15413 if (is_a <scalar_int_mode> (mode, &int_mode)
15414 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15415 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15416 #ifdef POINTERS_EXTEND_UNSIGNED
15417 || (int_mode == Pmode && mem_mode != VOIDmode)
15418 #endif
15419 )
15420 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15421 {
15422 mem_loc_result = mem_loc_descriptor (inner,
15423 inner_mode,
15424 mem_mode, initialized);
15425 break;
15426 }
15427 if (dwarf_strict && dwarf_version < 5)
15428 break;
15429 if (is_a <scalar_int_mode> (mode, &int_mode)
15430 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15431 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15432 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15433 {
15434 dw_die_ref type_die;
15435 dw_loc_descr_ref cvt;
15436
15437 mem_loc_result = mem_loc_descriptor (inner,
15438 GET_MODE (inner),
15439 mem_mode, initialized);
15440 if (mem_loc_result == NULL)
15441 break;
15442 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15443 if (type_die == NULL)
15444 {
15445 mem_loc_result = NULL;
15446 break;
15447 }
15448 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15449 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15450 else
15451 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15452 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15453 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15454 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15455 add_loc_descr (&mem_loc_result, cvt);
15456 if (is_a <scalar_int_mode> (mode, &int_mode)
15457 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15458 {
15459 /* Convert it to untyped afterwards. */
15460 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15461 add_loc_descr (&mem_loc_result, cvt);
15462 }
15463 }
15464 break;
15465
15466 case REG:
15467 if (!is_a <scalar_int_mode> (mode, &int_mode)
15468 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15469 && rtl != arg_pointer_rtx
15470 && rtl != frame_pointer_rtx
15471 #ifdef POINTERS_EXTEND_UNSIGNED
15472 && (int_mode != Pmode || mem_mode == VOIDmode)
15473 #endif
15474 ))
15475 {
15476 dw_die_ref type_die;
15477 unsigned int dbx_regnum;
15478
15479 if (dwarf_strict && dwarf_version < 5)
15480 break;
15481 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
15482 break;
15483 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15484 if (type_die == NULL)
15485 break;
15486
15487 dbx_regnum = dbx_reg_number (rtl);
15488 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15489 break;
15490 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15491 dbx_regnum, 0);
15492 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15493 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15494 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15495 break;
15496 }
15497 /* Whenever a register number forms a part of the description of the
15498 method for calculating the (dynamic) address of a memory resident
15499 object, DWARF rules require the register number be referred to as
15500 a "base register". This distinction is not based in any way upon
15501 what category of register the hardware believes the given register
15502 belongs to. This is strictly DWARF terminology we're dealing with
15503 here. Note that in cases where the location of a memory-resident
15504 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15505 OP_CONST (0)) the actual DWARF location descriptor that we generate
15506 may just be OP_BASEREG (basereg). This may look deceptively like
15507 the object in question was allocated to a register (rather than in
15508 memory) so DWARF consumers need to be aware of the subtle
15509 distinction between OP_REG and OP_BASEREG. */
15510 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15511 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15512 else if (stack_realign_drap
15513 && crtl->drap_reg
15514 && crtl->args.internal_arg_pointer == rtl
15515 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15516 {
15517 /* If RTL is internal_arg_pointer, which has been optimized
15518 out, use DRAP instead. */
15519 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15520 VAR_INIT_STATUS_INITIALIZED);
15521 }
15522 break;
15523
15524 case SIGN_EXTEND:
15525 case ZERO_EXTEND:
15526 if (!is_a <scalar_int_mode> (mode, &int_mode)
15527 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15528 break;
15529 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15530 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15531 if (op0 == 0)
15532 break;
15533 else if (GET_CODE (rtl) == ZERO_EXTEND
15534 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15535 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15536 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15537 to expand zero extend as two shifts instead of
15538 masking. */
15539 && GET_MODE_SIZE (inner_mode) <= 4)
15540 {
15541 mem_loc_result = op0;
15542 add_loc_descr (&mem_loc_result,
15543 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15544 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15545 }
15546 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15547 {
15548 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15549 shift *= BITS_PER_UNIT;
15550 if (GET_CODE (rtl) == SIGN_EXTEND)
15551 op = DW_OP_shra;
15552 else
15553 op = DW_OP_shr;
15554 mem_loc_result = op0;
15555 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15556 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15557 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15558 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15559 }
15560 else if (!dwarf_strict || dwarf_version >= 5)
15561 {
15562 dw_die_ref type_die1, type_die2;
15563 dw_loc_descr_ref cvt;
15564
15565 type_die1 = base_type_for_mode (inner_mode,
15566 GET_CODE (rtl) == ZERO_EXTEND);
15567 if (type_die1 == NULL)
15568 break;
15569 type_die2 = base_type_for_mode (int_mode, 1);
15570 if (type_die2 == NULL)
15571 break;
15572 mem_loc_result = op0;
15573 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15574 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15575 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15576 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15577 add_loc_descr (&mem_loc_result, cvt);
15578 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15579 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15580 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15581 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15582 add_loc_descr (&mem_loc_result, cvt);
15583 }
15584 break;
15585
15586 case MEM:
15587 {
15588 rtx new_rtl = avoid_constant_pool_reference (rtl);
15589 if (new_rtl != rtl)
15590 {
15591 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15592 initialized);
15593 if (mem_loc_result != NULL)
15594 return mem_loc_result;
15595 }
15596 }
15597 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15598 get_address_mode (rtl), mode,
15599 VAR_INIT_STATUS_INITIALIZED);
15600 if (mem_loc_result == NULL)
15601 mem_loc_result = tls_mem_loc_descriptor (rtl);
15602 if (mem_loc_result != NULL)
15603 {
15604 if (!is_a <scalar_int_mode> (mode, &int_mode)
15605 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15606 {
15607 dw_die_ref type_die;
15608 dw_loc_descr_ref deref;
15609 HOST_WIDE_INT size;
15610
15611 if (dwarf_strict && dwarf_version < 5)
15612 return NULL;
15613 if (!GET_MODE_SIZE (mode).is_constant (&size))
15614 return NULL;
15615 type_die
15616 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15617 if (type_die == NULL)
15618 return NULL;
15619 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15620 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15621 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15622 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15623 add_loc_descr (&mem_loc_result, deref);
15624 }
15625 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15626 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15627 else
15628 add_loc_descr (&mem_loc_result,
15629 new_loc_descr (DW_OP_deref_size,
15630 GET_MODE_SIZE (int_mode), 0));
15631 }
15632 break;
15633
15634 case LO_SUM:
15635 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15636
15637 case LABEL_REF:
15638 /* Some ports can transform a symbol ref into a label ref, because
15639 the symbol ref is too far away and has to be dumped into a constant
15640 pool. */
15641 case CONST:
15642 case SYMBOL_REF:
15643 if (!is_a <scalar_int_mode> (mode, &int_mode)
15644 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15645 #ifdef POINTERS_EXTEND_UNSIGNED
15646 && (int_mode != Pmode || mem_mode == VOIDmode)
15647 #endif
15648 ))
15649 break;
15650 if (GET_CODE (rtl) == SYMBOL_REF
15651 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15652 {
15653 dw_loc_descr_ref temp;
15654
15655 /* If this is not defined, we have no way to emit the data. */
15656 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15657 break;
15658
15659 temp = new_addr_loc_descr (rtl, dtprel_true);
15660
15661 /* We check for DWARF 5 here because gdb did not implement
15662 DW_OP_form_tls_address until after 7.12. */
15663 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15664 ? DW_OP_form_tls_address
15665 : DW_OP_GNU_push_tls_address),
15666 0, 0);
15667 add_loc_descr (&mem_loc_result, temp);
15668
15669 break;
15670 }
15671
15672 if (!const_ok_for_output (rtl))
15673 {
15674 if (GET_CODE (rtl) == CONST)
15675 switch (GET_CODE (XEXP (rtl, 0)))
15676 {
15677 case NOT:
15678 op = DW_OP_not;
15679 goto try_const_unop;
15680 case NEG:
15681 op = DW_OP_neg;
15682 goto try_const_unop;
15683 try_const_unop:
15684 rtx arg;
15685 arg = XEXP (XEXP (rtl, 0), 0);
15686 if (!CONSTANT_P (arg))
15687 arg = gen_rtx_CONST (int_mode, arg);
15688 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15689 initialized);
15690 if (op0)
15691 {
15692 mem_loc_result = op0;
15693 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15694 }
15695 break;
15696 default:
15697 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15698 mem_mode, initialized);
15699 break;
15700 }
15701 break;
15702 }
15703
15704 symref:
15705 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15706 vec_safe_push (used_rtx_array, rtl);
15707 break;
15708
15709 case CONCAT:
15710 case CONCATN:
15711 case VAR_LOCATION:
15712 case DEBUG_IMPLICIT_PTR:
15713 expansion_failed (NULL_TREE, rtl,
15714 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15715 return 0;
15716
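    /* DW_OP_entry_value (or its GNU extension equivalent) wraps a nested
       DWARF expression and yields the value that expression had on entry to
       the current function; the nested expression built here is either a
       register or a memory location based on a single register. */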
15717 case ENTRY_VALUE:
15718 if (dwarf_strict && dwarf_version < 5)
15719 return NULL;
15720 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15721 {
15722 if (!is_a <scalar_int_mode> (mode, &int_mode)
15723 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15724 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15725 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15726 else
15727 {
15728 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15729 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15730 return NULL;
15731 op0 = one_reg_loc_descriptor (dbx_regnum,
15732 VAR_INIT_STATUS_INITIALIZED);
15733 }
15734 }
15735 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15736 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15737 {
15738 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15739 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15740 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15741 return NULL;
15742 }
15743 else
15744 gcc_unreachable ();
15745 if (op0 == NULL)
15746 return NULL;
15747 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15748 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15749 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15750 break;
15751
15752 case DEBUG_PARAMETER_REF:
15753 mem_loc_result = parameter_ref_descriptor (rtl);
15754 break;
15755
15756 case PRE_MODIFY:
15757 /* Extract the PLUS expression nested inside and fall through
15758 to the PLUS code below. */
15759 rtl = XEXP (rtl, 1);
15760 goto plus;
15761
15762 case PRE_INC:
15763 case PRE_DEC:
15764 /* Turn these into a PLUS expression and fall into the PLUS code
15765 below. */
15766 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15767 gen_int_mode (GET_CODE (rtl) == PRE_INC
15768 ? GET_MODE_UNIT_SIZE (mem_mode)
15769 : -GET_MODE_UNIT_SIZE (mem_mode),
15770 mode));
15771
15772 /* fall through */
15773
15774 case PLUS:
15775 plus:
15776 if (is_based_loc (rtl)
15777 && is_a <scalar_int_mode> (mode, &int_mode)
15778 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15779 || XEXP (rtl, 0) == arg_pointer_rtx
15780 || XEXP (rtl, 0) == frame_pointer_rtx))
15781 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15782 INTVAL (XEXP (rtl, 1)),
15783 VAR_INIT_STATUS_INITIALIZED);
15784 else
15785 {
15786 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15787 VAR_INIT_STATUS_INITIALIZED);
15788 if (mem_loc_result == 0)
15789 break;
15790
15791 if (CONST_INT_P (XEXP (rtl, 1))
15792 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15793 <= DWARF2_ADDR_SIZE))
15794 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15795 else
15796 {
15797 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15798 VAR_INIT_STATUS_INITIALIZED);
15799 if (op1 == 0)
15800 return NULL;
15801 add_loc_descr (&mem_loc_result, op1);
15802 add_loc_descr (&mem_loc_result,
15803 new_loc_descr (DW_OP_plus, 0, 0));
15804 }
15805 }
15806 break;
15807
15808 /* If a pseudo-reg is optimized away, it is possible for it to
15809 be replaced with a MEM containing a multiply or shift. */
15810 case MINUS:
15811 op = DW_OP_minus;
15812 goto do_binop;
15813
15814 case MULT:
15815 op = DW_OP_mul;
15816 goto do_binop;
15817
15818 case DIV:
15819 if ((!dwarf_strict || dwarf_version >= 5)
15820 && is_a <scalar_int_mode> (mode, &int_mode)
15821 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15822 {
15823 mem_loc_result = typed_binop (DW_OP_div, rtl,
15824 base_type_for_mode (mode, 0),
15825 int_mode, mem_mode);
15826 break;
15827 }
15828 op = DW_OP_div;
15829 goto do_binop;
15830
15831 case UMOD:
15832 op = DW_OP_mod;
15833 goto do_binop;
15834
15835 case ASHIFT:
15836 op = DW_OP_shl;
15837 goto do_shift;
15838
15839 case ASHIFTRT:
15840 op = DW_OP_shra;
15841 goto do_shift;
15842
15843 case LSHIFTRT:
15844 op = DW_OP_shr;
15845 goto do_shift;
15846
15847 do_shift:
15848 if (!is_a <scalar_int_mode> (mode, &int_mode))
15849 break;
15850 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15851 VAR_INIT_STATUS_INITIALIZED);
15852 {
15853 rtx rtlop1 = XEXP (rtl, 1);
15854 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15855 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15856 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15857 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15858 VAR_INIT_STATUS_INITIALIZED);
15859 }
15860
15861 if (op0 == 0 || op1 == 0)
15862 break;
15863
15864 mem_loc_result = op0;
15865 add_loc_descr (&mem_loc_result, op1);
15866 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15867 break;
15868
15869 case AND:
15870 op = DW_OP_and;
15871 goto do_binop;
15872
15873 case IOR:
15874 op = DW_OP_or;
15875 goto do_binop;
15876
15877 case XOR:
15878 op = DW_OP_xor;
15879 goto do_binop;
15880
15881 do_binop:
15882 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15883 VAR_INIT_STATUS_INITIALIZED);
15884 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15885 VAR_INIT_STATUS_INITIALIZED);
15886
15887 if (op0 == 0 || op1 == 0)
15888 break;
15889
15890 mem_loc_result = op0;
15891 add_loc_descr (&mem_loc_result, op1);
15892 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15893 break;
15894
15895 case MOD:
15896 if ((!dwarf_strict || dwarf_version >= 5)
15897 && is_a <scalar_int_mode> (mode, &int_mode)
15898 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15899 {
15900 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15901 base_type_for_mode (mode, 0),
15902 int_mode, mem_mode);
15903 break;
15904 }
15905
15906 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15907 VAR_INIT_STATUS_INITIALIZED);
15908 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15909 VAR_INIT_STATUS_INITIALIZED);
15910
15911 if (op0 == 0 || op1 == 0)
15912 break;
15913
15914 mem_loc_result = op0;
15915 add_loc_descr (&mem_loc_result, op1);
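      /* GCC maps DW_OP_mod to the unsigned modulus (see the UMOD case above),
	 so compute the signed remainder as op0 - (op0 / op1) * op1.  With op0
	 and op1 already pushed, the sequence below evaluates roughly as:
	   OVER OVER -> op0 op1 op0 op1
	   DIV       -> op0 op1 (op0 / op1)
	   MUL       -> op0 (op1 * (op0 / op1))
	   MINUS     -> op0 - op1 * (op0 / op1)  */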
15916 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15917 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15918 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15919 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15920 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15921 break;
15922
15923 case UDIV:
15924 if ((!dwarf_strict || dwarf_version >= 5)
15925 && is_a <scalar_int_mode> (mode, &int_mode))
15926 {
15927 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15928 {
15929 op = DW_OP_div;
15930 goto do_binop;
15931 }
15932 mem_loc_result = typed_binop (DW_OP_div, rtl,
15933 base_type_for_mode (int_mode, 1),
15934 int_mode, mem_mode);
15935 }
15936 break;
15937
15938 case NOT:
15939 op = DW_OP_not;
15940 goto do_unop;
15941
15942 case ABS:
15943 op = DW_OP_abs;
15944 goto do_unop;
15945
15946 case NEG:
15947 op = DW_OP_neg;
15948 goto do_unop;
15949
15950 do_unop:
15951 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15952 VAR_INIT_STATUS_INITIALIZED);
15953
15954 if (op0 == 0)
15955 break;
15956
15957 mem_loc_result = op0;
15958 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15959 break;
15960
15961 case CONST_INT:
15962 if (!is_a <scalar_int_mode> (mode, &int_mode)
15963 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15964 #ifdef POINTERS_EXTEND_UNSIGNED
15965 || (int_mode == Pmode
15966 && mem_mode != VOIDmode
15967 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15968 #endif
15969 )
15970 {
15971 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15972 break;
15973 }
15974 if ((!dwarf_strict || dwarf_version >= 5)
15975 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15976 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15977 {
15978 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15979 scalar_int_mode amode;
15980 if (type_die == NULL)
15981 return NULL;
15982 if (INTVAL (rtl) >= 0
15983 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15984 .exists (&amode))
15985 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15986 /* const DW_OP_convert <XXX> vs.
15987 DW_OP_const_type <XXX, 1, const>. */
15988 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15989 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15990 {
15991 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15992 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15993 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15994 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15995 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15996 add_loc_descr (&mem_loc_result, op0);
15997 return mem_loc_result;
15998 }
15999 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16000 INTVAL (rtl));
16001 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16002 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16003 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16004 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16005 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16006 else
16007 {
16008 mem_loc_result->dw_loc_oprnd2.val_class
16009 = dw_val_class_const_double;
16010 mem_loc_result->dw_loc_oprnd2.v.val_double
16011 = double_int::from_shwi (INTVAL (rtl));
16012 }
16013 }
16014 break;
16015
16016 case CONST_DOUBLE:
16017 if (!dwarf_strict || dwarf_version >= 5)
16018 {
16019 dw_die_ref type_die;
16020
16021 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16022 CONST_DOUBLE rtx could represent either a large integer
16023 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16024 the value is always a floating point constant.
16025
16026 When it is an integer, a CONST_DOUBLE is used whenever
16027 the constant requires 2 HWIs to be adequately represented.
16028 We output CONST_DOUBLEs as blocks. */
16029 if (mode == VOIDmode
16030 || (GET_MODE (rtl) == VOIDmode
16031 && maybe_ne (GET_MODE_BITSIZE (mode),
16032 HOST_BITS_PER_DOUBLE_INT)))
16033 break;
16034 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16035 if (type_die == NULL)
16036 return NULL;
16037 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16038 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16039 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16040 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16041 #if TARGET_SUPPORTS_WIDE_INT == 0
16042 if (!SCALAR_FLOAT_MODE_P (mode))
16043 {
16044 mem_loc_result->dw_loc_oprnd2.val_class
16045 = dw_val_class_const_double;
16046 mem_loc_result->dw_loc_oprnd2.v.val_double
16047 = rtx_to_double_int (rtl);
16048 }
16049 else
16050 #endif
16051 {
16052 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16053 unsigned int length = GET_MODE_SIZE (float_mode);
16054 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16055
16056 insert_float (rtl, array);
16057 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16058 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16059 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16060 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16061 }
16062 }
16063 break;
16064
16065 case CONST_WIDE_INT:
16066 if (!dwarf_strict || dwarf_version >= 5)
16067 {
16068 dw_die_ref type_die;
16069
16070 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16071 if (type_die == NULL)
16072 return NULL;
16073 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16074 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16075 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16076 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16077 mem_loc_result->dw_loc_oprnd2.val_class
16078 = dw_val_class_wide_int;
16079 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16080 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16081 }
16082 break;
16083
16084 case CONST_POLY_INT:
16085 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16086 break;
16087
16088 case EQ:
16089 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16090 break;
16091
16092 case GE:
16093 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16094 break;
16095
16096 case GT:
16097 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16098 break;
16099
16100 case LE:
16101 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16102 break;
16103
16104 case LT:
16105 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16106 break;
16107
16108 case NE:
16109 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16110 break;
16111
16112 case GEU:
16113 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16114 break;
16115
16116 case GTU:
16117 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16118 break;
16119
16120 case LEU:
16121 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16122 break;
16123
16124 case LTU:
16125 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16126 break;
16127
16128 case UMIN:
16129 case UMAX:
16130 if (!SCALAR_INT_MODE_P (mode))
16131 break;
16132 /* FALLTHRU */
16133 case SMIN:
16134 case SMAX:
16135 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16136 break;
16137
16138 case ZERO_EXTRACT:
16139 case SIGN_EXTRACT:
16140 if (CONST_INT_P (XEXP (rtl, 1))
16141 && CONST_INT_P (XEXP (rtl, 2))
16142 && is_a <scalar_int_mode> (mode, &int_mode)
16143 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16144 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16145 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16146 && ((unsigned) INTVAL (XEXP (rtl, 1))
16147 + (unsigned) INTVAL (XEXP (rtl, 2))
16148 <= GET_MODE_BITSIZE (int_mode)))
16149 {
16150 int shift, size;
16151 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16152 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16153 if (op0 == 0)
16154 break;
16155 if (GET_CODE (rtl) == SIGN_EXTRACT)
16156 op = DW_OP_shra;
16157 else
16158 op = DW_OP_shr;
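	  /* Extract the bit-field by shifting it up against the most
	     significant end of a DWARF2_ADDR_SIZE-wide value with DW_OP_shl,
	     then shifting it back down with DW_OP_shr (ZERO_EXTRACT) or
	     DW_OP_shra (SIGN_EXTRACT), so the result is zero- or
	     sign-extended as appropriate. */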
16159 mem_loc_result = op0;
16160 size = INTVAL (XEXP (rtl, 1));
16161 shift = INTVAL (XEXP (rtl, 2));
16162 if (BITS_BIG_ENDIAN)
16163 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16164 if (shift + size != (int) DWARF2_ADDR_SIZE)
16165 {
16166 add_loc_descr (&mem_loc_result,
16167 int_loc_descriptor (DWARF2_ADDR_SIZE
16168 - shift - size));
16169 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16170 }
16171 if (size != (int) DWARF2_ADDR_SIZE)
16172 {
16173 add_loc_descr (&mem_loc_result,
16174 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16175 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16176 }
16177 }
16178 break;
16179
16180 case IF_THEN_ELSE:
16181 {
16182 dw_loc_descr_ref op2, bra_node, drop_node;
16183 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16184 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16185 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16186 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16187 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16188 VAR_INIT_STATUS_INITIALIZED);
16189 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16190 VAR_INIT_STATUS_INITIALIZED);
16191 if (op0 == NULL || op1 == NULL || op2 == NULL)
16192 break;
16193
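	/* Push op1 (the "then" value), op2 (the "else" value) and then the
	   condition; DW_OP_bra branches to the final DW_OP_drop when the
	   condition is nonzero, leaving op1 on the stack, while the
	   fall-through DW_OP_swap followed by DW_OP_drop leaves op2. */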
16194 mem_loc_result = op1;
16195 add_loc_descr (&mem_loc_result, op2);
16196 add_loc_descr (&mem_loc_result, op0);
16197 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16198 add_loc_descr (&mem_loc_result, bra_node);
16199 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16200 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16201 add_loc_descr (&mem_loc_result, drop_node);
16202 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16203 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16204 }
16205 break;
16206
16207 case FLOAT_EXTEND:
16208 case FLOAT_TRUNCATE:
16209 case FLOAT:
16210 case UNSIGNED_FLOAT:
16211 case FIX:
16212 case UNSIGNED_FIX:
16213 if (!dwarf_strict || dwarf_version >= 5)
16214 {
16215 dw_die_ref type_die;
16216 dw_loc_descr_ref cvt;
16217
16218 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16219 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16220 if (op0 == NULL)
16221 break;
16222 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16223 && (GET_CODE (rtl) == FLOAT
16224 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16225 {
16226 type_die = base_type_for_mode (int_mode,
16227 GET_CODE (rtl) == UNSIGNED_FLOAT);
16228 if (type_die == NULL)
16229 break;
16230 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16231 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16232 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16233 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16234 add_loc_descr (&op0, cvt);
16235 }
16236 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16237 if (type_die == NULL)
16238 break;
16239 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16240 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16241 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16242 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16243 add_loc_descr (&op0, cvt);
16244 if (is_a <scalar_int_mode> (mode, &int_mode)
16245 && (GET_CODE (rtl) == FIX
16246 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16247 {
16248 op0 = convert_descriptor_to_mode (int_mode, op0);
16249 if (op0 == NULL)
16250 break;
16251 }
16252 mem_loc_result = op0;
16253 }
16254 break;
16255
16256 case CLZ:
16257 case CTZ:
16258 case FFS:
16259 if (is_a <scalar_int_mode> (mode, &int_mode))
16260 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16261 break;
16262
16263 case POPCOUNT:
16264 case PARITY:
16265 if (is_a <scalar_int_mode> (mode, &int_mode))
16266 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16267 break;
16268
16269 case BSWAP:
16270 if (is_a <scalar_int_mode> (mode, &int_mode))
16271 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16272 break;
16273
16274 case ROTATE:
16275 case ROTATERT:
16276 if (is_a <scalar_int_mode> (mode, &int_mode))
16277 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16278 break;
16279
16280 case COMPARE:
16281 /* In theory, we could implement the above. */
16282 /* DWARF cannot represent the unsigned compare operations
16283 natively. */
16284 case SS_MULT:
16285 case US_MULT:
16286 case SS_DIV:
16287 case US_DIV:
16288 case SS_PLUS:
16289 case US_PLUS:
16290 case SS_MINUS:
16291 case US_MINUS:
16292 case SS_NEG:
16293 case US_NEG:
16294 case SS_ABS:
16295 case SS_ASHIFT:
16296 case US_ASHIFT:
16297 case SS_TRUNCATE:
16298 case US_TRUNCATE:
16299 case UNORDERED:
16300 case ORDERED:
16301 case UNEQ:
16302 case UNGE:
16303 case UNGT:
16304 case UNLE:
16305 case UNLT:
16306 case LTGT:
16307 case FRACT_CONVERT:
16308 case UNSIGNED_FRACT_CONVERT:
16309 case SAT_FRACT:
16310 case UNSIGNED_SAT_FRACT:
16311 case SQRT:
16312 case ASM_OPERANDS:
16313 case VEC_MERGE:
16314 case VEC_SELECT:
16315 case VEC_CONCAT:
16316 case VEC_DUPLICATE:
16317 case VEC_SERIES:
16318 case UNSPEC:
16319 case HIGH:
16320 case FMA:
16321 case STRICT_LOW_PART:
16322 case CONST_VECTOR:
16323 case CONST_FIXED:
16324 case CLRSB:
16325 case CLOBBER:
16326 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16327 can't express it in the debug info. This can happen e.g. with some
16328 TLS UNSPECs. */
16329 break;
16330
16331 case CONST_STRING:
16332 resolve_one_addr (&rtl);
16333 goto symref;
16334
16335 /* RTL sequences inside a PARALLEL record a series of DWARF operations for
16336 the expression. An UNSPEC rtx represents a raw DWARF operation;
16337 new_loc_descr is called for it to build the operation directly.
16338 Otherwise mem_loc_descriptor is called recursively. */
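/* For example, an UNSPEC whose number is DW_OP_and and whose two (unused)
   operands are const0_rtx simply appends a DW_OP_and to the expression built
   so far. */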
16339 case PARALLEL:
16340 {
16341 int index = 0;
16342 dw_loc_descr_ref exp_result = NULL;
16343
16344 for (; index < XVECLEN (rtl, 0); index++)
16345 {
16346 rtx elem = XVECEXP (rtl, 0, index);
16347 if (GET_CODE (elem) == UNSPEC)
16348 {
16349 /* Each DWARF operation UNSPEC contains two operands; if
16350 one operand is not used for the operation, const0_rtx is
16351 passed. */
16352 gcc_assert (XVECLEN (elem, 0) == 2);
16353
16354 HOST_WIDE_INT dw_op = XINT (elem, 1);
16355 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16356 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16357 exp_result
16358 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16359 oprnd2);
16360 }
16361 else
16362 exp_result
16363 = mem_loc_descriptor (elem, mode, mem_mode,
16364 VAR_INIT_STATUS_INITIALIZED);
16365
16366 if (!mem_loc_result)
16367 mem_loc_result = exp_result;
16368 else
16369 add_loc_descr (&mem_loc_result, exp_result);
16370 }
16371
16372 break;
16373 }
16374
16375 default:
16376 if (flag_checking)
16377 {
16378 print_rtl (stderr, rtl);
16379 gcc_unreachable ();
16380 }
16381 break;
16382 }
16383
16384 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16385 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16386
16387 return mem_loc_result;
16388 }
16389
16390 /* Return a descriptor that describes the concatenation of two locations.
16391 This is typically a complex variable. */
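/* For example, for a complex value whose real part lives in X0 and imaginary
   part in X1, the result is roughly
   <location of X0> DW_OP_piece size0 <location of X1> DW_OP_piece size1. */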
16392
16393 static dw_loc_descr_ref
16394 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16395 {
16396 /* At present we only track constant-sized pieces. */
16397 unsigned int size0, size1;
16398 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16399 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16400 return 0;
16401
16402 dw_loc_descr_ref cc_loc_result = NULL;
16403 dw_loc_descr_ref x0_ref
16404 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16405 dw_loc_descr_ref x1_ref
16406 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16407
16408 if (x0_ref == 0 || x1_ref == 0)
16409 return 0;
16410
16411 cc_loc_result = x0_ref;
16412 add_loc_descr_op_piece (&cc_loc_result, size0);
16413
16414 add_loc_descr (&cc_loc_result, x1_ref);
16415 add_loc_descr_op_piece (&cc_loc_result, size1);
16416
16417 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16418 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16419
16420 return cc_loc_result;
16421 }
16422
16423 /* Return a descriptor that describes the concatenation of N
16424 locations. */
16425
16426 static dw_loc_descr_ref
16427 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16428 {
16429 unsigned int i;
16430 dw_loc_descr_ref cc_loc_result = NULL;
16431 unsigned int n = XVECLEN (concatn, 0);
16432 unsigned int size;
16433
16434 for (i = 0; i < n; ++i)
16435 {
16436 dw_loc_descr_ref ref;
16437 rtx x = XVECEXP (concatn, 0, i);
16438
16439 /* At present we only track constant-sized pieces. */
16440 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16441 return NULL;
16442
16443 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16444 if (ref == NULL)
16445 return NULL;
16446
16447 add_loc_descr (&cc_loc_result, ref);
16448 add_loc_descr_op_piece (&cc_loc_result, size);
16449 }
16450
16451 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16452 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16453
16454 return cc_loc_result;
16455 }
16456
16457 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16458 for DEBUG_IMPLICIT_PTR RTL. */
16459
16460 static dw_loc_descr_ref
16461 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16462 {
16463 dw_loc_descr_ref ret;
16464 dw_die_ref ref;
16465
16466 if (dwarf_strict && dwarf_version < 5)
16467 return NULL;
16468 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16469 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16470 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16471 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16472 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16473 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16474 if (ref)
16475 {
16476 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16477 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16478 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16479 }
16480 else
16481 {
16482 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16483 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16484 }
16485 return ret;
16486 }
16487
16488 /* Output a proper Dwarf location descriptor for a variable or parameter
16489 which is either allocated in a register or in a memory location. For a
16490 register, we just generate an OP_REG and the register number. For a
16491 memory location we provide a Dwarf postfix expression describing how to
16492 generate the (dynamic) address of the object onto the address stack.
16493
16494 MODE is mode of the decl if this loc_descriptor is going to be used in
16495 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16496 allowed, VOIDmode otherwise.
16497
16498 If we don't know how to describe it, return 0. */
16499
16500 static dw_loc_descr_ref
16501 loc_descriptor (rtx rtl, machine_mode mode,
16502 enum var_init_status initialized)
16503 {
16504 dw_loc_descr_ref loc_result = NULL;
16505 scalar_int_mode int_mode;
16506
16507 switch (GET_CODE (rtl))
16508 {
16509 case SUBREG:
16510 /* The case of a subreg may arise when we have a local (register)
16511 variable or a formal (register) parameter which doesn't quite fill
16512 up an entire register. For now, just assume that it is
16513 legitimate to make the Dwarf info refer to the whole register which
16514 contains the given subreg. */
16515 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16516 loc_result = loc_descriptor (SUBREG_REG (rtl),
16517 GET_MODE (SUBREG_REG (rtl)), initialized);
16518 else
16519 goto do_default;
16520 break;
16521
16522 case REG:
16523 loc_result = reg_loc_descriptor (rtl, initialized);
16524 break;
16525
16526 case MEM:
16527 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16528 GET_MODE (rtl), initialized);
16529 if (loc_result == NULL)
16530 loc_result = tls_mem_loc_descriptor (rtl);
16531 if (loc_result == NULL)
16532 {
16533 rtx new_rtl = avoid_constant_pool_reference (rtl);
16534 if (new_rtl != rtl)
16535 loc_result = loc_descriptor (new_rtl, mode, initialized);
16536 }
16537 break;
16538
16539 case CONCAT:
16540 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16541 initialized);
16542 break;
16543
16544 case CONCATN:
16545 loc_result = concatn_loc_descriptor (rtl, initialized);
16546 break;
16547
16548 case VAR_LOCATION:
16549 /* Single part. */
16550 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16551 {
16552 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16553 if (GET_CODE (loc) == EXPR_LIST)
16554 loc = XEXP (loc, 0);
16555 loc_result = loc_descriptor (loc, mode, initialized);
16556 break;
16557 }
16558
16559 rtl = XEXP (rtl, 1);
16560 /* FALLTHRU */
16561
16562 case PARALLEL:
16563 {
16564 rtvec par_elems = XVEC (rtl, 0);
16565 int num_elem = GET_NUM_ELEM (par_elems);
16566 machine_mode mode;
16567 int i, size;
16568
16569 /* Create the first one, so we have something to add to. */
16570 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16571 VOIDmode, initialized);
16572 if (loc_result == NULL)
16573 return NULL;
16574 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16575 /* At present we only track constant-sized pieces. */
16576 if (!GET_MODE_SIZE (mode).is_constant (&size))
16577 return NULL;
16578 add_loc_descr_op_piece (&loc_result, size);
16579 for (i = 1; i < num_elem; i++)
16580 {
16581 dw_loc_descr_ref temp;
16582
16583 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16584 VOIDmode, initialized);
16585 if (temp == NULL)
16586 return NULL;
16587 add_loc_descr (&loc_result, temp);
16588 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16589 /* At present we only track constant-sized pieces. */
16590 if (!GET_MODE_SIZE (mode).is_constant (&size))
16591 return NULL;
16592 add_loc_descr_op_piece (&loc_result, size);
16593 }
16594 }
16595 break;
16596
16597 case CONST_INT:
16598 if (mode != VOIDmode && mode != BLKmode)
16599 {
16600 int_mode = as_a <scalar_int_mode> (mode);
16601 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16602 INTVAL (rtl));
16603 }
16604 break;
16605
16606 case CONST_DOUBLE:
16607 if (mode == VOIDmode)
16608 mode = GET_MODE (rtl);
16609
16610 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16611 {
16612 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16613
16614 /* Note that a CONST_DOUBLE rtx could represent either an integer
16615 or a floating-point constant. A CONST_DOUBLE is used whenever
16616 the constant requires more than one word in order to be
16617 adequately represented. We output CONST_DOUBLEs as blocks. */
16618 scalar_mode smode = as_a <scalar_mode> (mode);
16619 loc_result = new_loc_descr (DW_OP_implicit_value,
16620 GET_MODE_SIZE (smode), 0);
16621 #if TARGET_SUPPORTS_WIDE_INT == 0
16622 if (!SCALAR_FLOAT_MODE_P (smode))
16623 {
16624 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16625 loc_result->dw_loc_oprnd2.v.val_double
16626 = rtx_to_double_int (rtl);
16627 }
16628 else
16629 #endif
16630 {
16631 unsigned int length = GET_MODE_SIZE (smode);
16632 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16633
16634 insert_float (rtl, array);
16635 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16636 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16637 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16638 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16639 }
16640 }
16641 break;
16642
16643 case CONST_WIDE_INT:
16644 if (mode == VOIDmode)
16645 mode = GET_MODE (rtl);
16646
16647 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16648 {
16649 int_mode = as_a <scalar_int_mode> (mode);
16650 loc_result = new_loc_descr (DW_OP_implicit_value,
16651 GET_MODE_SIZE (int_mode), 0);
16652 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16653 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16654 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16655 }
16656 break;
16657
16658 case CONST_VECTOR:
16659 if (mode == VOIDmode)
16660 mode = GET_MODE (rtl);
16661
16662 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16663 {
16664 unsigned int length;
16665 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16666 return NULL;
16667
16668 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16669 unsigned char *array
16670 = ggc_vec_alloc<unsigned char> (length * elt_size);
16671 unsigned int i;
16672 unsigned char *p;
16673 machine_mode imode = GET_MODE_INNER (mode);
16674
16675 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16676 switch (GET_MODE_CLASS (mode))
16677 {
16678 case MODE_VECTOR_INT:
16679 for (i = 0, p = array; i < length; i++, p += elt_size)
16680 {
16681 rtx elt = CONST_VECTOR_ELT (rtl, i);
16682 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16683 }
16684 break;
16685
16686 case MODE_VECTOR_FLOAT:
16687 for (i = 0, p = array; i < length; i++, p += elt_size)
16688 {
16689 rtx elt = CONST_VECTOR_ELT (rtl, i);
16690 insert_float (elt, p);
16691 }
16692 break;
16693
16694 default:
16695 gcc_unreachable ();
16696 }
16697
16698 loc_result = new_loc_descr (DW_OP_implicit_value,
16699 length * elt_size, 0);
16700 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16701 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16702 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16703 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16704 }
16705 break;
16706
16707 case CONST:
16708 if (mode == VOIDmode
16709 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16710 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16711 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16712 {
16713 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16714 break;
16715 }
16716 /* FALLTHROUGH */
16717 case SYMBOL_REF:
16718 if (!const_ok_for_output (rtl))
16719 break;
16720 /* FALLTHROUGH */
16721 case LABEL_REF:
16722 if (is_a <scalar_int_mode> (mode, &int_mode)
16723 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16724 && (dwarf_version >= 4 || !dwarf_strict))
16725 {
16726 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16727 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16728 vec_safe_push (used_rtx_array, rtl);
16729 }
16730 break;
16731
16732 case DEBUG_IMPLICIT_PTR:
16733 loc_result = implicit_ptr_descriptor (rtl, 0);
16734 break;
16735
16736 case PLUS:
16737 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16738 && CONST_INT_P (XEXP (rtl, 1)))
16739 {
16740 loc_result
16741 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16742 break;
16743 }
16744 /* FALLTHRU */
16745 do_default:
16746 default:
16747 if ((is_a <scalar_int_mode> (mode, &int_mode)
16748 && GET_MODE (rtl) == int_mode
16749 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16750 && dwarf_version >= 4)
16751 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16752 {
16753 /* Value expression. */
16754 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16755 if (loc_result)
16756 add_loc_descr (&loc_result,
16757 new_loc_descr (DW_OP_stack_value, 0, 0));
16758 }
16759 break;
16760 }
16761
16762 return loc_result;
16763 }
16764
16765 /* We need to figure out what section we should use as the base for the
16766 address ranges where a given location is valid.
16767 1. If this particular DECL has a section associated with it, use that.
16768 2. If this function has a section associated with it, use that.
16769 3. Otherwise, use the text section.
16770 XXX: If you split a variable across multiple sections, we won't notice. */
16771
16772 static const char *
16773 secname_for_decl (const_tree decl)
16774 {
16775 const char *secname;
16776
16777 if (VAR_OR_FUNCTION_DECL_P (decl)
16778 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16779 && DECL_SECTION_NAME (decl))
16780 secname = DECL_SECTION_NAME (decl);
16781 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16782 secname = DECL_SECTION_NAME (current_function_decl);
16783 else if (cfun && in_cold_section_p)
16784 secname = crtl->subsections.cold_section_label;
16785 else
16786 secname = text_section_label;
16787
16788 return secname;
16789 }
16790
16791 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16792
16793 static bool
16794 decl_by_reference_p (tree decl)
16795 {
16796 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16797 || VAR_P (decl))
16798 && DECL_BY_REFERENCE (decl));
16799 }
16800
16801 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16802 for VARLOC. */
16803
16804 static dw_loc_descr_ref
16805 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16806 enum var_init_status initialized)
16807 {
16808 int have_address = 0;
16809 dw_loc_descr_ref descr;
16810 machine_mode mode;
16811
16812 if (want_address != 2)
16813 {
16814 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16815 /* Single part. */
16816 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16817 {
16818 varloc = PAT_VAR_LOCATION_LOC (varloc);
16819 if (GET_CODE (varloc) == EXPR_LIST)
16820 varloc = XEXP (varloc, 0);
16821 mode = GET_MODE (varloc);
16822 if (MEM_P (varloc))
16823 {
16824 rtx addr = XEXP (varloc, 0);
16825 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16826 mode, initialized);
16827 if (descr)
16828 have_address = 1;
16829 else
16830 {
16831 rtx x = avoid_constant_pool_reference (varloc);
16832 if (x != varloc)
16833 descr = mem_loc_descriptor (x, mode, VOIDmode,
16834 initialized);
16835 }
16836 }
16837 else
16838 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16839 }
16840 else
16841 return 0;
16842 }
16843 else
16844 {
16845 if (GET_CODE (varloc) == VAR_LOCATION)
16846 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16847 else
16848 mode = DECL_MODE (loc);
16849 descr = loc_descriptor (varloc, mode, initialized);
16850 have_address = 1;
16851 }
16852
16853 if (!descr)
16854 return 0;
16855
16856 if (want_address == 2 && !have_address
16857 && (dwarf_version >= 4 || !dwarf_strict))
16858 {
16859 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16860 {
16861 expansion_failed (loc, NULL_RTX,
16862 "DWARF address size mismatch");
16863 return 0;
16864 }
16865 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16866 have_address = 1;
16867 }
16868 /* Report failure if we can't fill the request for an address. */
16869 if (want_address && !have_address)
16870 {
16871 expansion_failed (loc, NULL_RTX,
16872 "Want address and only have value");
16873 return 0;
16874 }
16875
16876 /* If we've got an address and don't want one, dereference. */
16877 if (!want_address && have_address)
16878 {
16879 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16880 enum dwarf_location_atom op;
16881
16882 if (size > DWARF2_ADDR_SIZE || size == -1)
16883 {
16884 expansion_failed (loc, NULL_RTX,
16885 "DWARF address size mismatch");
16886 return 0;
16887 }
16888 else if (size == DWARF2_ADDR_SIZE)
16889 op = DW_OP_deref;
16890 else
16891 op = DW_OP_deref_size;
16892
16893 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16894 }
16895
16896 return descr;
16897 }
16898
16899 /* Create a DW_OP_piece or DW_OP_bit_piece for the given BITSIZE and bit
16900 OFFSET, or return NULL if it is not possible. */
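/* For example, on a target with 8-bit units a byte-aligned BITSIZE of 32 with
   zero OFFSET yields DW_OP_piece 4, while an unaligned or offset piece needs
   DW_OP_bit_piece, which is only emitted for DWARF 3 and later or when
   dwarf_strict is not in effect. */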
16901
16902 static dw_loc_descr_ref
16903 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16904 {
16905 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16906 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16907 else if (dwarf_version >= 3 || !dwarf_strict)
16908 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16909 else
16910 return NULL;
16911 }
16912
16913 /* Helper function for dw_loc_list. Compute a proper DWARF location descriptor
16914 for the variable location list LOC of variable DECL, which has been optimized by SRA. */
16915
16916 static dw_loc_descr_ref
16917 dw_sra_loc_expr (tree decl, rtx loc)
16918 {
16919 rtx p;
16920 unsigned HOST_WIDE_INT padsize = 0;
16921 dw_loc_descr_ref descr, *descr_tail;
16922 unsigned HOST_WIDE_INT decl_size;
16923 rtx varloc;
16924 enum var_init_status initialized;
16925
16926 if (DECL_SIZE (decl) == NULL
16927 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16928 return NULL;
16929
16930 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16931 descr = NULL;
16932 descr_tail = &descr;
16933
16934 for (p = loc; p; p = XEXP (p, 1))
16935 {
16936 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16937 rtx loc_note = *decl_piece_varloc_ptr (p);
16938 dw_loc_descr_ref cur_descr;
16939 dw_loc_descr_ref *tail, last = NULL;
16940 unsigned HOST_WIDE_INT opsize = 0;
16941
16942 if (loc_note == NULL_RTX
16943 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16944 {
16945 padsize += bitsize;
16946 continue;
16947 }
16948 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16949 varloc = NOTE_VAR_LOCATION (loc_note);
16950 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16951 if (cur_descr == NULL)
16952 {
16953 padsize += bitsize;
16954 continue;
16955 }
16956
16957 /* Check that cur_descr either doesn't use
16958 DW_OP_*piece operations, or their sum is equal
16959 to bitsize. Otherwise we can't embed it. */
16960 for (tail = &cur_descr; *tail != NULL;
16961 tail = &(*tail)->dw_loc_next)
16962 if ((*tail)->dw_loc_opc == DW_OP_piece)
16963 {
16964 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16965 * BITS_PER_UNIT;
16966 last = *tail;
16967 }
16968 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16969 {
16970 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16971 last = *tail;
16972 }
16973
16974 if (last != NULL && opsize != bitsize)
16975 {
16976 padsize += bitsize;
16977 /* Discard the current piece of the descriptor and release any
16978 addr_table entries it uses. */
16979 remove_loc_list_addr_table_entries (cur_descr);
16980 continue;
16981 }
16982
16983 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
16984 expression, which means that those bits are optimized out. */
16985 if (padsize)
16986 {
16987 if (padsize > decl_size)
16988 {
16989 remove_loc_list_addr_table_entries (cur_descr);
16990 goto discard_descr;
16991 }
16992 decl_size -= padsize;
16993 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16994 if (*descr_tail == NULL)
16995 {
16996 remove_loc_list_addr_table_entries (cur_descr);
16997 goto discard_descr;
16998 }
16999 descr_tail = &(*descr_tail)->dw_loc_next;
17000 padsize = 0;
17001 }
17002 *descr_tail = cur_descr;
17003 descr_tail = tail;
17004 if (bitsize > decl_size)
17005 goto discard_descr;
17006 decl_size -= bitsize;
17007 if (last == NULL)
17008 {
17009 HOST_WIDE_INT offset = 0;
17010 if (GET_CODE (varloc) == VAR_LOCATION
17011 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17012 {
17013 varloc = PAT_VAR_LOCATION_LOC (varloc);
17014 if (GET_CODE (varloc) == EXPR_LIST)
17015 varloc = XEXP (varloc, 0);
17016 }
17017 do
17018 {
17019 if (GET_CODE (varloc) == CONST
17020 || GET_CODE (varloc) == SIGN_EXTEND
17021 || GET_CODE (varloc) == ZERO_EXTEND)
17022 varloc = XEXP (varloc, 0);
17023 else if (GET_CODE (varloc) == SUBREG)
17024 varloc = SUBREG_REG (varloc);
17025 else
17026 break;
17027 }
17028 while (1);
17029 /* The DW_OP_bit_piece offset should be zero for register
17030 or implicit location descriptions and empty location
17031 descriptions, but for memory addresses it needs a big-endian
17032 adjustment. */
17033 if (MEM_P (varloc))
17034 {
17035 unsigned HOST_WIDE_INT memsize;
17036 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17037 goto discard_descr;
17038 memsize *= BITS_PER_UNIT;
17039 if (memsize != bitsize)
17040 {
17041 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17042 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17043 goto discard_descr;
17044 if (memsize < bitsize)
17045 goto discard_descr;
17046 if (BITS_BIG_ENDIAN)
17047 offset = memsize - bitsize;
17048 }
17049 }
17050
17051 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17052 if (*descr_tail == NULL)
17053 goto discard_descr;
17054 descr_tail = &(*descr_tail)->dw_loc_next;
17055 }
17056 }
17057
17058 /* If there were any non-empty expressions, add padding till the end of
17059 the decl. */
17060 if (descr != NULL && decl_size != 0)
17061 {
17062 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17063 if (*descr_tail == NULL)
17064 goto discard_descr;
17065 }
17066 return descr;
17067
17068 discard_descr:
17069 /* Discard the descriptor and release any addr_table entries it uses. */
17070 remove_loc_list_addr_table_entries (descr);
17071 return NULL;
17072 }
17073
17074 /* Return the DWARF representation of the location list LOC_LIST of
17075 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
17076 function. */
17077
17078 static dw_loc_list_ref
17079 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17080 {
17081 const char *endname, *secname;
17082 var_loc_view endview;
17083 rtx varloc;
17084 enum var_init_status initialized;
17085 struct var_loc_node *node;
17086 dw_loc_descr_ref descr;
17087 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17088 dw_loc_list_ref list = NULL;
17089 dw_loc_list_ref *listp = &list;
17090
17091 /* Now that we know what section we are using for a base,
17092 actually construct the list of locations.
17093 The first location information is what is passed to the
17094 function that creates the location list, and the remaining
17095 locations just get added on to that list.
17096 Note that we only know the start address for a location
17097 (i.e. where the location changes), so to build the range, we use
17098 the range [current location start, next location start].
17099 This means we have to special case the last node, and generate
17100 a range of [last location start, end of function label]. */
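   /* For example, if a variable's location changes at labels L1, L2 and L3
      within a function, the emitted ranges are [L1, L2), [L2, L3) and
      [L3, end of function). */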
17101
17102 if (cfun && crtl->has_bb_partition)
17103 {
17104 bool save_in_cold_section_p = in_cold_section_p;
17105 in_cold_section_p = first_function_block_is_cold;
17106 if (loc_list->last_before_switch == NULL)
17107 in_cold_section_p = !in_cold_section_p;
17108 secname = secname_for_decl (decl);
17109 in_cold_section_p = save_in_cold_section_p;
17110 }
17111 else
17112 secname = secname_for_decl (decl);
17113
17114 for (node = loc_list->first; node; node = node->next)
17115 {
17116 bool range_across_switch = false;
17117 if (GET_CODE (node->loc) == EXPR_LIST
17118 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17119 {
17120 if (GET_CODE (node->loc) == EXPR_LIST)
17121 {
17122 descr = NULL;
17123 /* This requires DW_OP_{,bit_}piece, which is not usable
17124 inside DWARF expressions. */
17125 if (want_address == 2)
17126 descr = dw_sra_loc_expr (decl, node->loc);
17127 }
17128 else
17129 {
17130 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17131 varloc = NOTE_VAR_LOCATION (node->loc);
17132 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17133 }
17134 if (descr)
17135 {
17136 /* If a section switch happens between node->label
17137 and node->next->label (or the end of the function) and
17138 we can't emit it as a single entry list,
17139 emit two ranges, first one ending at the end
17140 of first partition and second one starting at the
17141 beginning of second partition. */
17142 if (node == loc_list->last_before_switch
17143 && (node != loc_list->first || loc_list->first->next
17144 /* If we are to emit a view number, we will emit
17145 a loclist rather than a single location
17146 expression for the entire function (see
17147 loc_list_has_views), so we have to split the
17148 range that straddles the partitions. */
17149 || !ZERO_VIEW_P (node->view))
17150 && current_function_decl)
17151 {
17152 endname = cfun->fde->dw_fde_end;
17153 endview = 0;
17154 range_across_switch = true;
17155 }
17156 /* The variable has a location between NODE->LABEL and
17157 NODE->NEXT->LABEL. */
17158 else if (node->next)
17159 endname = node->next->label, endview = node->next->view;
17160 /* If the variable has a location at the last label
17161 it keeps its location until the end of function. */
17162 else if (!current_function_decl)
17163 endname = text_end_label, endview = 0;
17164 else
17165 {
17166 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17167 current_function_funcdef_no);
17168 endname = ggc_strdup (label_id);
17169 endview = 0;
17170 }
17171
17172 *listp = new_loc_list (descr, node->label, node->view,
17173 endname, endview, secname);
17174 if (TREE_CODE (decl) == PARM_DECL
17175 && node == loc_list->first
17176 && NOTE_P (node->loc)
17177 && strcmp (node->label, endname) == 0)
17178 (*listp)->force = true;
17179 listp = &(*listp)->dw_loc_next;
17180 }
17181 }
17182
17183 if (cfun
17184 && crtl->has_bb_partition
17185 && node == loc_list->last_before_switch)
17186 {
17187 bool save_in_cold_section_p = in_cold_section_p;
17188 in_cold_section_p = !first_function_block_is_cold;
17189 secname = secname_for_decl (decl);
17190 in_cold_section_p = save_in_cold_section_p;
17191 }
17192
17193 if (range_across_switch)
17194 {
17195 if (GET_CODE (node->loc) == EXPR_LIST)
17196 descr = dw_sra_loc_expr (decl, node->loc);
17197 else
17198 {
17199 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17200 varloc = NOTE_VAR_LOCATION (node->loc);
17201 descr = dw_loc_list_1 (decl, varloc, want_address,
17202 initialized);
17203 }
17204 gcc_assert (descr);
17205 /* The variable has a location between NODE->LABEL and
17206 NODE->NEXT->LABEL. */
17207 if (node->next)
17208 endname = node->next->label, endview = node->next->view;
17209 else
17210 endname = cfun->fde->dw_fde_second_end, endview = 0;
17211 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17212 endname, endview, secname);
17213 listp = &(*listp)->dw_loc_next;
17214 }
17215 }
17216
17217 /* Try to avoid the overhead of a location list by emitting a location
17218 expression instead, but only if we didn't have more than one
17219 location entry in the first place. If some entries were not
17220 representable, we don't want to pretend that a single entry that was
17221 representable applies to the entire scope in which the variable is
17222 available. */
17223 if (list && loc_list->first->next)
17224 gen_llsym (list);
17225 else
17226 maybe_gen_llsym (list);
17227
17228 return list;
17229 }
17230
17231 /* Return true if the loc_list has only a single element and thus can be
17232 represented as a location description. */
17233
17234 static bool
17235 single_element_loc_list_p (dw_loc_list_ref list)
17236 {
17237 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17238 return !list->ll_symbol;
17239 }
17240
17241 /* Duplicate a single element of a location list. */
17242
17243 static inline dw_loc_descr_ref
17244 copy_loc_descr (dw_loc_descr_ref ref)
17245 {
17246 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17247 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17248 return copy;
17249 }
17250
17251 /* To each location in list LIST append loc descr REF. */
17252
17253 static void
17254 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17255 {
17256 dw_loc_descr_ref copy;
17257 add_loc_descr (&list->expr, ref);
17258 list = list->dw_loc_next;
17259 while (list)
17260 {
17261 copy = copy_loc_descr (ref);
17262 add_loc_descr (&list->expr, copy);
17263 while (copy->dw_loc_next)
17264 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17265 list = list->dw_loc_next;
17266 }
17267 }
17268
17269 /* To each location in list LIST prepend loc descr REF. */
17270
17271 static void
17272 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17273 {
17274 dw_loc_descr_ref copy;
17275 dw_loc_descr_ref ref_end = list->expr;
17276 add_loc_descr (&ref, list->expr);
17277 list->expr = ref;
17278 list = list->dw_loc_next;
17279 while (list)
17280 {
17281 dw_loc_descr_ref end = list->expr;
17282 list->expr = copy = copy_loc_descr (ref);
17283 while (copy->dw_loc_next != ref_end)
17284 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17285 copy->dw_loc_next = end;
17286 list = list->dw_loc_next;
17287 }
17288 }
17289
17290 /* Given two lists RET and LIST,
17291 produce a location list that is the result of adding the expression in LIST
17292 to the expression in RET at each position in the program.
17293 Might be destructive on both RET and LIST.
17294
17295 TODO: We handle only the simple cases of RET or LIST having at most one
17296 element. The general case would involve sorting the lists in program order
17297 and merging them, which will need some additional work.
17298 Adding that will improve the quality of debug info, especially for SRA-ed
17299 structures. */
17300
17301 static void
17302 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17303 {
17304 if (!list)
17305 return;
17306 if (!*ret)
17307 {
17308 *ret = list;
17309 return;
17310 }
17311 if (!list->dw_loc_next)
17312 {
17313 add_loc_descr_to_each (*ret, list->expr);
17314 return;
17315 }
17316 if (!(*ret)->dw_loc_next)
17317 {
17318 prepend_loc_descr_to_each (list, (*ret)->expr);
17319 *ret = list;
17320 return;
17321 }
17322 expansion_failed (NULL_TREE, NULL_RTX,
17323 "Don't know how to merge two non-trivial"
17324 " location lists.\n");
17325 *ret = NULL;
17326 return;
17327 }
17328
17329 /* LOC is a constant expression. Try our luck: look it up in the constant
17330 pool and return a loc_descr for its address. */
17331
17332 static dw_loc_descr_ref
17333 cst_pool_loc_descr (tree loc)
17334 {
17335 /* Get an RTL for this, if something has been emitted. */
17336 rtx rtl = lookup_constant_def (loc);
17337
17338 if (!rtl || !MEM_P (rtl))
17339 {
17340 gcc_assert (!rtl);
17341 return 0;
17342 }
17343 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17344
17345 /* TODO: We might get more coverage if we were actually delaying expansion
17346 of all expressions until the end of compilation, when constant pools are
17347 fully populated. */
17348 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17349 {
17350 expansion_failed (loc, NULL_RTX,
17351 "CST value in contant pool but not marked.");
17352 return 0;
17353 }
17354 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17355 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17356 }
17357
17358 /* Return a dw_loc_list representing the address of the ADDR_EXPR LOC
17359 by looking for an inner INDIRECT_REF expression and turning
17360 it into simple arithmetic.
17361
17362 See loc_list_from_tree for the meaning of CONTEXT. */
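/* For example, for LOC of the form &(*PTR).FIELD this computes the value of
   PTR plus FIELD's byte offset, instead of requiring the referenced object to
   have an address of its own. */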
17363
17364 static dw_loc_list_ref
17365 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17366 loc_descr_context *context)
17367 {
17368 tree obj, offset;
17369 poly_int64 bitsize, bitpos, bytepos;
17370 machine_mode mode;
17371 int unsignedp, reversep, volatilep = 0;
17372 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17373
17374 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17375 &bitsize, &bitpos, &offset, &mode,
17376 &unsignedp, &reversep, &volatilep);
17377 STRIP_NOPS (obj);
17378 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17379 {
17380 expansion_failed (loc, NULL_RTX, "bitfield access");
17381 return 0;
17382 }
17383 if (!INDIRECT_REF_P (obj))
17384 {
17385 expansion_failed (obj,
17386 NULL_RTX, "no indirect ref in inner reference");
17387 return 0;
17388 }
17389 if (!offset && known_eq (bitpos, 0))
17390 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17391 context);
17392 else if (toplev
17393 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17394 && (dwarf_version >= 4 || !dwarf_strict))
17395 {
17396 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17397 if (!list_ret)
17398 return 0;
17399 if (offset)
17400 {
17401 /* Variable offset. */
17402 list_ret1 = loc_list_from_tree (offset, 0, context);
17403 if (list_ret1 == 0)
17404 return 0;
17405 add_loc_list (&list_ret, list_ret1);
17406 if (!list_ret)
17407 return 0;
17408 add_loc_descr_to_each (list_ret,
17409 new_loc_descr (DW_OP_plus, 0, 0));
17410 }
17411 HOST_WIDE_INT value;
17412 if (bytepos.is_constant (&value) && value > 0)
17413 add_loc_descr_to_each (list_ret,
17414 new_loc_descr (DW_OP_plus_uconst, value, 0));
17415 else if (maybe_ne (bytepos, 0))
17416 loc_list_plus_const (list_ret, bytepos);
17417 add_loc_descr_to_each (list_ret,
17418 new_loc_descr (DW_OP_stack_value, 0, 0));
17419 }
17420 return list_ret;
17421 }
17422
17423 /* Set LOC to the next operation that is not a DW_OP_nop operation. If all
17424 operations from LOC are nops, move to the last one. Insert into NOPS all
17425 operations that are skipped. */
17426
17427 static void
17428 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17429 hash_set<dw_loc_descr_ref> &nops)
17430 {
17431 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17432 {
17433 nops.add (loc);
17434 loc = loc->dw_loc_next;
17435 }
17436 }
17437
17438 /* Helper for loc_descr_without_nops: free the location description operation
17439 LOC. */
17440
17441 bool
17442 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17443 {
17444 ggc_free (loc);
17445 return true;
17446 }
17447
17448 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17449 finishes LOC. */
17450
17451 static void
17452 loc_descr_without_nops (dw_loc_descr_ref &loc)
17453 {
17454 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17455 return;
17456
17457 /* Set of all DW_OP_nop operations we remove. */
17458 hash_set<dw_loc_descr_ref> nops;
17459
17460 /* First, strip all prefix NOP operations in order to keep the head of the
17461 operations list. */
17462 loc_descr_to_next_no_nop (loc, nops);
17463
17464 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17465 {
17466 /* For control flow operations: strip "prefix" nops in destination
17467 labels. */
17468 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17469 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17470 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17471 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17472
17473 /* Do the same for the operations that follow, then move to the next
17474 iteration. */
17475 if (cur->dw_loc_next != NULL)
17476 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17477 cur = cur->dw_loc_next;
17478 }
17479
17480 nops.traverse<void *, free_loc_descr> (NULL);
17481 }
17482
17483
17484 struct dwarf_procedure_info;
17485
17486 /* Helper structure for location descriptions generation. */
17487 struct loc_descr_context
17488 {
17489 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17490      NULL_TREE if DW_OP_push_object_address is invalid for this location
17491 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17492 tree context_type;
17493 /* The ..._DECL node that should be translated as a
17494 DW_OP_push_object_address operation. */
17495 tree base_decl;
17496 /* Information about the DWARF procedure we are currently generating. NULL if
17497 we are not generating a DWARF procedure. */
17498 struct dwarf_procedure_info *dpi;
17499 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17500      by the consumer.  Used for DW_TAG_generic_subrange attributes.  */
17501 bool placeholder_arg;
17502 /* True if PLACEHOLDER_EXPR has been seen. */
17503 bool placeholder_seen;
17504 };
17505
17506 /* DWARF procedures generation
17507
17508    DWARF expressions (aka. location descriptions) are used to encode values
17509    that can vary, such as sizes or offsets.  Such computations can have redundant parts
17510 that can be factorized in order to reduce the size of the output debug
17511 information. This is the whole point of DWARF procedures.
17512
17513 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17514 already factorized into functions ("size functions") in order to handle very
17515 big and complex types. Such functions are quite simple: they have integral
17516 arguments, they return an integral result and their body contains only a
17517 return statement with arithmetic expressions. This is the only kind of
17518    function we are interested in translating into DWARF procedures here.
17519
17520    DWARF expressions and DWARF procedures are executed using a stack, so we have
17521 to define some calling convention for them to interact. Let's say that:
17522
17523 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17524 all arguments in reverse order (right-to-left) so that when the DWARF
17525 procedure execution starts, the first argument is the top of the stack.
17526
17527 - Then, when returning, the DWARF procedure must have consumed all arguments
17528 on the stack, must have pushed the result and touched nothing else.
17529
17530    - Each argument and the result have an integral type and thus can be held
17531      in a single stack slot.
17532
17533 - We call "frame offset" the number of stack slots that are "under DWARF
17534 procedure control": it includes the arguments slots, the temporaries and
17535 the result slot. Thus, it is equal to the number of arguments when the
17536 procedure execution starts and must be equal to one (the result) when it
17537 returns. */
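/* As an illustrative sketch (not a literal dump of what GCC emits), a size
   function equivalent to "return n * 4;" taking a single argument N could be
   translated into a DW_TAG_dwarf_procedure whose DW_AT_location is roughly:

     DW_OP_dup	; access N			(frame offset 1 -> 2)
     DW_OP_lit4	; push the constant 4		(frame offset 2 -> 3)
     DW_OP_mul	; compute N * 4			(frame offset 3 -> 2)
     DW_OP_swap	; epilogue: bring N back on top
     DW_OP_drop	; drop the consumed argument	(frame offset 2 -> 1)

   leaving exactly one stack slot (the result) when the procedure returns, as
   required by the convention above.  */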
17538
17539 /* Helper structure used when generating operations for a DWARF procedure. */
17540 struct dwarf_procedure_info
17541 {
17542 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17543 currently translated. */
17544 tree fndecl;
17545 /* The number of arguments FNDECL takes. */
17546 unsigned args_count;
17547 };
17548
17549 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17550 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17551 equate it to this DIE. */
17552
17553 static dw_die_ref
17554 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17555 dw_die_ref parent_die)
17556 {
17557 dw_die_ref dwarf_proc_die;
17558
17559 if ((dwarf_version < 3 && dwarf_strict)
17560 || location == NULL)
17561 return NULL;
17562
17563 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17564 if (fndecl)
17565 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17566 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17567 return dwarf_proc_die;
17568 }
17569
17570 /* Return whether TYPE is a supported type as a DWARF procedure argument
17571 type or return type (we handle only scalar types and pointer types that
17572    aren't wider than the DWARF expression evaluation stack).  */
17573
17574 static bool
17575 is_handled_procedure_type (tree type)
17576 {
17577 return ((INTEGRAL_TYPE_P (type)
17578 || TREE_CODE (type) == OFFSET_TYPE
17579 || TREE_CODE (type) == POINTER_TYPE)
17580 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17581 }
17582
17583 /* Helper for resolve_args_picking: do the same but stop when coming across
17584 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17585 offset *before* evaluating the corresponding operation. */
17586
17587 static bool
17588 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17589 struct dwarf_procedure_info *dpi,
17590 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17591 {
17592 /* The "frame_offset" identifier is already used to name a macro... */
17593 unsigned frame_offset_ = initial_frame_offset;
17594 dw_loc_descr_ref l;
17595
17596 for (l = loc; l != NULL;)
17597 {
17598 bool existed;
17599 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17600
17601       /* If we have already met this node, there is nothing more to compute.  */
17602 if (existed)
17603 {
17604 /* Make sure that the stack size is consistent wherever the execution
17605 flow comes from. */
17606 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17607 break;
17608 }
17609 l_frame_offset = frame_offset_;
17610
17611 /* If needed, relocate the picking offset with respect to the frame
17612 offset. */
17613 if (l->frame_offset_rel)
17614 {
17615 unsigned HOST_WIDE_INT off;
17616 switch (l->dw_loc_opc)
17617 {
17618 case DW_OP_pick:
17619 off = l->dw_loc_oprnd1.v.val_unsigned;
17620 break;
17621 case DW_OP_dup:
17622 off = 0;
17623 break;
17624 case DW_OP_over:
17625 off = 1;
17626 break;
17627 default:
17628 gcc_unreachable ();
17629 }
17630 /* frame_offset_ is the size of the current stack frame, including
17631 incoming arguments. Besides, the arguments are pushed
17632 right-to-left. Thus, in order to access the Nth argument from
17633 this operation node, the picking has to skip temporaries *plus*
17634 one stack slot per argument (0 for the first one, 1 for the second
17635 one, etc.).
17636
17637	     The targeted argument number (N) is already set as the operand,
17638	     and the number of temporaries can be computed with:
17639	       frame_offset_ - dpi->args_count  */
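	  /* For example (illustrative numbers): with dpi->args_count == 2 and
	     frame_offset_ == 3 (one temporary already pushed at this point),
	     accessing the first argument (N == 0) gives off = 0 + 3 - 2 = 1,
	     which the code below then rewrites as DW_OP_over.  */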
17640 off += frame_offset_ - dpi->args_count;
17641
17642 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17643 if (off > 255)
17644 return false;
17645
17646 if (off == 0)
17647 {
17648 l->dw_loc_opc = DW_OP_dup;
17649 l->dw_loc_oprnd1.v.val_unsigned = 0;
17650 }
17651 else if (off == 1)
17652 {
17653 l->dw_loc_opc = DW_OP_over;
17654 l->dw_loc_oprnd1.v.val_unsigned = 0;
17655 }
17656 else
17657 {
17658 l->dw_loc_opc = DW_OP_pick;
17659 l->dw_loc_oprnd1.v.val_unsigned = off;
17660 }
17661 }
17662
17663 /* Update frame_offset according to the effect the current operation has
17664 on the stack. */
17665 switch (l->dw_loc_opc)
17666 {
17667 case DW_OP_deref:
17668 case DW_OP_swap:
17669 case DW_OP_rot:
17670 case DW_OP_abs:
17671 case DW_OP_neg:
17672 case DW_OP_not:
17673 case DW_OP_plus_uconst:
17674 case DW_OP_skip:
17675 case DW_OP_reg0:
17676 case DW_OP_reg1:
17677 case DW_OP_reg2:
17678 case DW_OP_reg3:
17679 case DW_OP_reg4:
17680 case DW_OP_reg5:
17681 case DW_OP_reg6:
17682 case DW_OP_reg7:
17683 case DW_OP_reg8:
17684 case DW_OP_reg9:
17685 case DW_OP_reg10:
17686 case DW_OP_reg11:
17687 case DW_OP_reg12:
17688 case DW_OP_reg13:
17689 case DW_OP_reg14:
17690 case DW_OP_reg15:
17691 case DW_OP_reg16:
17692 case DW_OP_reg17:
17693 case DW_OP_reg18:
17694 case DW_OP_reg19:
17695 case DW_OP_reg20:
17696 case DW_OP_reg21:
17697 case DW_OP_reg22:
17698 case DW_OP_reg23:
17699 case DW_OP_reg24:
17700 case DW_OP_reg25:
17701 case DW_OP_reg26:
17702 case DW_OP_reg27:
17703 case DW_OP_reg28:
17704 case DW_OP_reg29:
17705 case DW_OP_reg30:
17706 case DW_OP_reg31:
17707 case DW_OP_bregx:
17708 case DW_OP_piece:
17709 case DW_OP_deref_size:
17710 case DW_OP_nop:
17711 case DW_OP_bit_piece:
17712 case DW_OP_implicit_value:
17713 case DW_OP_stack_value:
17714 break;
17715
17716 case DW_OP_addr:
17717 case DW_OP_const1u:
17718 case DW_OP_const1s:
17719 case DW_OP_const2u:
17720 case DW_OP_const2s:
17721 case DW_OP_const4u:
17722 case DW_OP_const4s:
17723 case DW_OP_const8u:
17724 case DW_OP_const8s:
17725 case DW_OP_constu:
17726 case DW_OP_consts:
17727 case DW_OP_dup:
17728 case DW_OP_over:
17729 case DW_OP_pick:
17730 case DW_OP_lit0:
17731 case DW_OP_lit1:
17732 case DW_OP_lit2:
17733 case DW_OP_lit3:
17734 case DW_OP_lit4:
17735 case DW_OP_lit5:
17736 case DW_OP_lit6:
17737 case DW_OP_lit7:
17738 case DW_OP_lit8:
17739 case DW_OP_lit9:
17740 case DW_OP_lit10:
17741 case DW_OP_lit11:
17742 case DW_OP_lit12:
17743 case DW_OP_lit13:
17744 case DW_OP_lit14:
17745 case DW_OP_lit15:
17746 case DW_OP_lit16:
17747 case DW_OP_lit17:
17748 case DW_OP_lit18:
17749 case DW_OP_lit19:
17750 case DW_OP_lit20:
17751 case DW_OP_lit21:
17752 case DW_OP_lit22:
17753 case DW_OP_lit23:
17754 case DW_OP_lit24:
17755 case DW_OP_lit25:
17756 case DW_OP_lit26:
17757 case DW_OP_lit27:
17758 case DW_OP_lit28:
17759 case DW_OP_lit29:
17760 case DW_OP_lit30:
17761 case DW_OP_lit31:
17762 case DW_OP_breg0:
17763 case DW_OP_breg1:
17764 case DW_OP_breg2:
17765 case DW_OP_breg3:
17766 case DW_OP_breg4:
17767 case DW_OP_breg5:
17768 case DW_OP_breg6:
17769 case DW_OP_breg7:
17770 case DW_OP_breg8:
17771 case DW_OP_breg9:
17772 case DW_OP_breg10:
17773 case DW_OP_breg11:
17774 case DW_OP_breg12:
17775 case DW_OP_breg13:
17776 case DW_OP_breg14:
17777 case DW_OP_breg15:
17778 case DW_OP_breg16:
17779 case DW_OP_breg17:
17780 case DW_OP_breg18:
17781 case DW_OP_breg19:
17782 case DW_OP_breg20:
17783 case DW_OP_breg21:
17784 case DW_OP_breg22:
17785 case DW_OP_breg23:
17786 case DW_OP_breg24:
17787 case DW_OP_breg25:
17788 case DW_OP_breg26:
17789 case DW_OP_breg27:
17790 case DW_OP_breg28:
17791 case DW_OP_breg29:
17792 case DW_OP_breg30:
17793 case DW_OP_breg31:
17794 case DW_OP_fbreg:
17795 case DW_OP_push_object_address:
17796 case DW_OP_call_frame_cfa:
17797 case DW_OP_GNU_variable_value:
17798 ++frame_offset_;
17799 break;
17800
17801 case DW_OP_drop:
17802 case DW_OP_xderef:
17803 case DW_OP_and:
17804 case DW_OP_div:
17805 case DW_OP_minus:
17806 case DW_OP_mod:
17807 case DW_OP_mul:
17808 case DW_OP_or:
17809 case DW_OP_plus:
17810 case DW_OP_shl:
17811 case DW_OP_shr:
17812 case DW_OP_shra:
17813 case DW_OP_xor:
17814 case DW_OP_bra:
17815 case DW_OP_eq:
17816 case DW_OP_ge:
17817 case DW_OP_gt:
17818 case DW_OP_le:
17819 case DW_OP_lt:
17820 case DW_OP_ne:
17821 case DW_OP_regx:
17822 case DW_OP_xderef_size:
17823 --frame_offset_;
17824 break;
17825
17826 case DW_OP_call2:
17827 case DW_OP_call4:
17828 case DW_OP_call_ref:
17829 {
17830 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17831 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17832
17833 if (stack_usage == NULL)
17834 return false;
17835 frame_offset_ += *stack_usage;
17836 break;
17837 }
17838
17839 case DW_OP_implicit_pointer:
17840 case DW_OP_entry_value:
17841 case DW_OP_const_type:
17842 case DW_OP_regval_type:
17843 case DW_OP_deref_type:
17844 case DW_OP_convert:
17845 case DW_OP_reinterpret:
17846 case DW_OP_form_tls_address:
17847 case DW_OP_GNU_push_tls_address:
17848 case DW_OP_GNU_uninit:
17849 case DW_OP_GNU_encoded_addr:
17850 case DW_OP_GNU_implicit_pointer:
17851 case DW_OP_GNU_entry_value:
17852 case DW_OP_GNU_const_type:
17853 case DW_OP_GNU_regval_type:
17854 case DW_OP_GNU_deref_type:
17855 case DW_OP_GNU_convert:
17856 case DW_OP_GNU_reinterpret:
17857 case DW_OP_GNU_parameter_ref:
17858 /* loc_list_from_tree will probably not output these operations for
17859 size functions, so assume they will not appear here. */
17860 /* Fall through... */
17861
17862 default:
17863 gcc_unreachable ();
17864 }
17865
17866 /* Now, follow the control flow (except subroutine calls). */
17867 switch (l->dw_loc_opc)
17868 {
17869 case DW_OP_bra:
17870 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17871 frame_offsets))
17872 return false;
17873 /* Fall through. */
17874
17875 case DW_OP_skip:
17876 l = l->dw_loc_oprnd1.v.val_loc;
17877 break;
17878
17879 case DW_OP_stack_value:
17880 return true;
17881
17882 default:
17883 l = l->dw_loc_next;
17884 break;
17885 }
17886 }
17887
17888 return true;
17889 }
17890
17891 /* Perform a DFS over the operations reachable through LOC (i.e. follow branch
17892    operations) in order to resolve the operand of DW_OP_pick operations that
17893    target DWARF procedure arguments (DPI).  INITIAL_FRAME_OFFSET is the frame
17894    offset *before* LOC is executed.  Return whether all relocations were
17895    successful.  */
17896
17897 static bool
17898 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17899 struct dwarf_procedure_info *dpi)
17900 {
17901 /* Associate to all visited operations the frame offset *before* evaluating
17902 this operation. */
17903 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17904
17905 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17906 frame_offsets);
17907 }
17908
17909 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17910 Return NULL if it is not possible. */
17911
17912 static dw_die_ref
17913 function_to_dwarf_procedure (tree fndecl)
17914 {
17915 struct loc_descr_context ctx;
17916 struct dwarf_procedure_info dpi;
17917 dw_die_ref dwarf_proc_die;
17918 tree tree_body = DECL_SAVED_TREE (fndecl);
17919 dw_loc_descr_ref loc_body, epilogue;
17920
17921 tree cursor;
17922 unsigned i;
17923
17924 /* Do not generate multiple DWARF procedures for the same function
17925 declaration. */
17926 dwarf_proc_die = lookup_decl_die (fndecl);
17927 if (dwarf_proc_die != NULL)
17928 return dwarf_proc_die;
17929
17930 /* DWARF procedures are available starting with the DWARFv3 standard. */
17931 if (dwarf_version < 3 && dwarf_strict)
17932 return NULL;
17933
17934 /* We handle only functions for which we still have a body, that return a
17935      supported type and that take arguments with supported types.  Note that
17936 there is no point translating functions that return nothing. */
17937 if (tree_body == NULL_TREE
17938 || DECL_RESULT (fndecl) == NULL_TREE
17939 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17940 return NULL;
17941
17942 for (cursor = DECL_ARGUMENTS (fndecl);
17943 cursor != NULL_TREE;
17944 cursor = TREE_CHAIN (cursor))
17945 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17946 return NULL;
17947
17948 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17949 if (TREE_CODE (tree_body) != RETURN_EXPR)
17950 return NULL;
17951 tree_body = TREE_OPERAND (tree_body, 0);
17952 if (TREE_CODE (tree_body) != MODIFY_EXPR
17953 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17954 return NULL;
17955 tree_body = TREE_OPERAND (tree_body, 1);
17956
17957 /* Try to translate the body expression itself. Note that this will probably
17958 cause an infinite recursion if its call graph has a cycle. This is very
17959 unlikely for size functions, however, so don't bother with such things at
17960 the moment. */
17961 ctx.context_type = NULL_TREE;
17962 ctx.base_decl = NULL_TREE;
17963 ctx.dpi = &dpi;
17964 ctx.placeholder_arg = false;
17965 ctx.placeholder_seen = false;
17966 dpi.fndecl = fndecl;
17967 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17968 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17969 if (!loc_body)
17970 return NULL;
17971
17972 /* After evaluating all operands in "loc_body", we should still have on the
17973 stack all arguments plus the desired function result (top of the stack).
17974 Generate code in order to keep only the result in our stack frame. */
17975 epilogue = NULL;
17976 for (i = 0; i < dpi.args_count; ++i)
17977 {
17978 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17979 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17980 op_couple->dw_loc_next->dw_loc_next = epilogue;
17981 epilogue = op_couple;
17982 }
17983 add_loc_descr (&loc_body, epilogue);
17984 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17985 return NULL;
17986
17987 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17988      because they are considered useful.  Now that there is an epilogue, they
17989      no longer are, so give it another try.   */
17990 loc_descr_without_nops (loc_body);
17991
17992 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17993      a DW_TAG_dwarf_procedure, so we may have a conflict here.  It's unlikely,
17994 though, given that size functions do not come from source, so they should
17995 not have a dedicated DW_TAG_subprogram DIE. */
17996 dwarf_proc_die
17997 = new_dwarf_proc_die (loc_body, fndecl,
17998 get_context_die (DECL_CONTEXT (fndecl)));
17999
18000 /* The called DWARF procedure consumes one stack slot per argument and
18001 returns one stack slot. */
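  /* For instance (illustrative), a two-argument procedure pops both of its
     arguments and pushes a single result, so the net stack effect recorded
     here is 1 - 2 = -1.  */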
18002 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18003
18004 return dwarf_proc_die;
18005 }
18006
18007
18008 /* Generate a DWARF location list representing LOC.
18009    If WANT_ADDRESS is 0, an expression computing the value of LOC is returned;
18010    if WANT_ADDRESS is 1, an expression computing the address of LOC is returned;
18011    if WANT_ADDRESS is 2, an expression computing an address usable in a
18012    location description is returned (i.e. DW_OP_reg can be used
18013    to refer to register values).
18014
18015 CONTEXT provides information to customize the location descriptions
18016 generation. Its context_type field specifies what type is implicitly
18017 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18018 will not be generated.
18019
18020 Its DPI field determines whether we are generating a DWARF expression for a
18021 DWARF procedure, so PARM_DECL references are processed specifically.
18022
18023 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18024 and dpi fields were null. */
18025
18026 static dw_loc_list_ref
18027 loc_list_from_tree_1 (tree loc, int want_address,
18028 struct loc_descr_context *context)
18029 {
18030 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18031 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18032 int have_address = 0;
18033 enum dwarf_location_atom op;
18034
18035   /* ??? Most of the time we do not take proper care to sign/zero
18036      extend the values.  Hopefully this won't be a real
18037 problem... */
18038
18039 if (context != NULL
18040 && context->base_decl == loc
18041 && want_address == 0)
18042 {
18043 if (dwarf_version >= 3 || !dwarf_strict)
18044 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18045 NULL, 0, NULL, 0, NULL);
18046 else
18047 return NULL;
18048 }
18049
18050 switch (TREE_CODE (loc))
18051 {
18052 case ERROR_MARK:
18053 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18054 return 0;
18055
18056 case PLACEHOLDER_EXPR:
18057 /* This case involves extracting fields from an object to determine the
18058 position of other fields. It is supposed to appear only as the first
18059 operand of COMPONENT_REF nodes and to reference precisely the type
18060 that the context allows. */
18061 if (context != NULL
18062 && TREE_TYPE (loc) == context->context_type
18063 && want_address >= 1)
18064 {
18065 if (dwarf_version >= 3 || !dwarf_strict)
18066 {
18067 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18068 have_address = 1;
18069 break;
18070 }
18071 else
18072 return NULL;
18073 }
18074 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18075 the single argument passed by consumer. */
18076 else if (context != NULL
18077 && context->placeholder_arg
18078 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18079 && want_address == 0)
18080 {
18081 ret = new_loc_descr (DW_OP_pick, 0, 0);
18082 ret->frame_offset_rel = 1;
18083 context->placeholder_seen = true;
18084 break;
18085 }
18086 else
18087 expansion_failed (loc, NULL_RTX,
18088 "PLACEHOLDER_EXPR for an unexpected type");
18089 break;
18090
18091 case CALL_EXPR:
18092 {
18093 const int nargs = call_expr_nargs (loc);
18094 tree callee = get_callee_fndecl (loc);
18095 int i;
18096 dw_die_ref dwarf_proc;
18097
18098 if (callee == NULL_TREE)
18099 goto call_expansion_failed;
18100
18101	/* We handle only functions that return a supported scalar type.  */
18102 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18103 goto call_expansion_failed;
18104
18105 dwarf_proc = function_to_dwarf_procedure (callee);
18106 if (dwarf_proc == NULL)
18107 goto call_expansion_failed;
18108
18109 /* Evaluate arguments right-to-left so that the first argument will
18110 be the top-most one on the stack. */
18111 for (i = nargs - 1; i >= 0; --i)
18112 {
18113 dw_loc_descr_ref loc_descr
18114 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18115 context);
18116
18117 if (loc_descr == NULL)
18118 goto call_expansion_failed;
18119
18120 add_loc_descr (&ret, loc_descr);
18121 }
18122
18123 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18124 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18125 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18126 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18127 add_loc_descr (&ret, ret1);
18128 break;
18129
18130 call_expansion_failed:
18131 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18132 /* There are no opcodes for these operations. */
18133 return 0;
18134 }
18135
18136 case PREINCREMENT_EXPR:
18137 case PREDECREMENT_EXPR:
18138 case POSTINCREMENT_EXPR:
18139 case POSTDECREMENT_EXPR:
18140       expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18141 /* There are no opcodes for these operations. */
18142 return 0;
18143
18144 case ADDR_EXPR:
18145       /* If we already want an address, see if there is an INDIRECT_REF inside,
18146	 e.g. for &this->field.  */
18147 if (want_address)
18148 {
18149 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18150 (loc, want_address == 2, context);
18151 if (list_ret)
18152 have_address = 1;
18153 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18154 && (ret = cst_pool_loc_descr (loc)))
18155 have_address = 1;
18156 }
18157 /* Otherwise, process the argument and look for the address. */
18158 if (!list_ret && !ret)
18159 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18160 else
18161 {
18162 if (want_address)
18163 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18164 return NULL;
18165 }
18166 break;
18167
18168 case VAR_DECL:
18169 if (DECL_THREAD_LOCAL_P (loc))
18170 {
18171 rtx rtl;
18172 enum dwarf_location_atom tls_op;
18173 enum dtprel_bool dtprel = dtprel_false;
18174
18175 if (targetm.have_tls)
18176 {
18177 /* If this is not defined, we have no way to emit the
18178 data. */
18179 if (!targetm.asm_out.output_dwarf_dtprel)
18180 return 0;
18181
18182 /* The way DW_OP_GNU_push_tls_address is specified, we
18183 can only look up addresses of objects in the current
18184 module. We used DW_OP_addr as first op, but that's
18185 wrong, because DW_OP_addr is relocated by the debug
18186 info consumer, while DW_OP_GNU_push_tls_address
18187 operand shouldn't be. */
18188 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18189 return 0;
18190 dtprel = dtprel_true;
18191 /* We check for DWARF 5 here because gdb did not implement
18192 DW_OP_form_tls_address until after 7.12. */
18193 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18194 : DW_OP_GNU_push_tls_address);
18195 }
18196 else
18197 {
18198 if (!targetm.emutls.debug_form_tls_address
18199 || !(dwarf_version >= 3 || !dwarf_strict))
18200 return 0;
18201 /* We stuffed the control variable into the DECL_VALUE_EXPR
18202 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18203 no longer appear in gimple code. We used the control
18204		 variable specifically so that we could pick it up here.  */
18205 loc = DECL_VALUE_EXPR (loc);
18206 tls_op = DW_OP_form_tls_address;
18207 }
18208
18209 rtl = rtl_for_decl_location (loc);
18210 if (rtl == NULL_RTX)
18211 return 0;
18212
18213 if (!MEM_P (rtl))
18214 return 0;
18215 rtl = XEXP (rtl, 0);
18216 if (! CONSTANT_P (rtl))
18217 return 0;
18218
18219 ret = new_addr_loc_descr (rtl, dtprel);
18220 ret1 = new_loc_descr (tls_op, 0, 0);
18221 add_loc_descr (&ret, ret1);
18222
18223 have_address = 1;
18224 break;
18225 }
18226 /* FALLTHRU */
18227
18228 case PARM_DECL:
18229 if (context != NULL && context->dpi != NULL
18230 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18231 {
18232 /* We are generating code for a DWARF procedure and we want to access
18233 one of its arguments: find the appropriate argument offset and let
18234 the resolve_args_picking pass compute the offset that complies
18235 with the stack frame size. */
18236 unsigned i = 0;
18237 tree cursor;
18238
18239 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18240 cursor != NULL_TREE && cursor != loc;
18241 cursor = TREE_CHAIN (cursor), ++i)
18242 ;
18243 /* If we are translating a DWARF procedure, all referenced parameters
18244 must belong to the current function. */
18245 gcc_assert (cursor != NULL_TREE);
18246
18247 ret = new_loc_descr (DW_OP_pick, i, 0);
18248 ret->frame_offset_rel = 1;
18249 break;
18250 }
18251 /* FALLTHRU */
18252
18253 case RESULT_DECL:
18254 if (DECL_HAS_VALUE_EXPR_P (loc))
18255 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18256 want_address, context);
18257 /* FALLTHRU */
18258
18259 case FUNCTION_DECL:
18260 {
18261 rtx rtl;
18262 var_loc_list *loc_list = lookup_decl_loc (loc);
18263
18264 if (loc_list && loc_list->first)
18265 {
18266 list_ret = dw_loc_list (loc_list, loc, want_address);
18267 have_address = want_address != 0;
18268 break;
18269 }
18270 rtl = rtl_for_decl_location (loc);
18271 if (rtl == NULL_RTX)
18272 {
18273 if (TREE_CODE (loc) != FUNCTION_DECL
18274 && early_dwarf
18275 && current_function_decl
18276 && want_address != 1
18277 && ! DECL_IGNORED_P (loc)
18278 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18279 || POINTER_TYPE_P (TREE_TYPE (loc)))
18280 && DECL_CONTEXT (loc) == current_function_decl
18281 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18282 <= DWARF2_ADDR_SIZE))
18283 {
18284 dw_die_ref ref = lookup_decl_die (loc);
18285 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18286 if (ref)
18287 {
18288 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18289 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18290 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18291 }
18292 else
18293 {
18294 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18295 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18296 }
18297 break;
18298 }
18299 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18300 return 0;
18301 }
18302 else if (CONST_INT_P (rtl))
18303 {
18304 HOST_WIDE_INT val = INTVAL (rtl);
18305 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18306 val &= GET_MODE_MASK (DECL_MODE (loc));
18307 ret = int_loc_descriptor (val);
18308 }
18309 else if (GET_CODE (rtl) == CONST_STRING)
18310 {
18311 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18312 return 0;
18313 }
18314 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18315 ret = new_addr_loc_descr (rtl, dtprel_false);
18316 else
18317 {
18318 machine_mode mode, mem_mode;
18319
18320 /* Certain constructs can only be represented at top-level. */
18321 if (want_address == 2)
18322 {
18323 ret = loc_descriptor (rtl, VOIDmode,
18324 VAR_INIT_STATUS_INITIALIZED);
18325 have_address = 1;
18326 }
18327 else
18328 {
18329 mode = GET_MODE (rtl);
18330 mem_mode = VOIDmode;
18331 if (MEM_P (rtl))
18332 {
18333 mem_mode = mode;
18334 mode = get_address_mode (rtl);
18335 rtl = XEXP (rtl, 0);
18336 have_address = 1;
18337 }
18338 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18339 VAR_INIT_STATUS_INITIALIZED);
18340 }
18341 if (!ret)
18342 expansion_failed (loc, rtl,
18343 "failed to produce loc descriptor for rtl");
18344 }
18345 }
18346 break;
18347
18348 case MEM_REF:
18349 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18350 {
18351 have_address = 1;
18352 goto do_plus;
18353 }
18354 /* Fallthru. */
18355 case INDIRECT_REF:
18356 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18357 have_address = 1;
18358 break;
18359
18360 case TARGET_MEM_REF:
18361 case SSA_NAME:
18362 case DEBUG_EXPR_DECL:
18363 return NULL;
18364
18365 case COMPOUND_EXPR:
18366 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18367 context);
18368
18369 CASE_CONVERT:
18370 case VIEW_CONVERT_EXPR:
18371 case SAVE_EXPR:
18372 case MODIFY_EXPR:
18373 case NON_LVALUE_EXPR:
18374 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18375 context);
18376
18377 case COMPONENT_REF:
18378 case BIT_FIELD_REF:
18379 case ARRAY_REF:
18380 case ARRAY_RANGE_REF:
18381 case REALPART_EXPR:
18382 case IMAGPART_EXPR:
18383 {
18384 tree obj, offset;
18385 poly_int64 bitsize, bitpos, bytepos;
18386 machine_mode mode;
18387 int unsignedp, reversep, volatilep = 0;
18388
18389 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18390 &unsignedp, &reversep, &volatilep);
18391
18392 gcc_assert (obj != loc);
18393
18394 list_ret = loc_list_from_tree_1 (obj,
18395 want_address == 2
18396 && known_eq (bitpos, 0)
18397 && !offset ? 2 : 1,
18398 context);
18399	/* TODO: We can extract the value of a small expression via shifting even
18400	   for a nonzero bitpos.  */
18401 if (list_ret == 0)
18402 return 0;
18403 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18404 || !multiple_p (bitsize, BITS_PER_UNIT))
18405 {
18406 expansion_failed (loc, NULL_RTX,
18407 "bitfield access");
18408 return 0;
18409 }
18410
18411 if (offset != NULL_TREE)
18412 {
18413 /* Variable offset. */
18414 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18415 if (list_ret1 == 0)
18416 return 0;
18417 add_loc_list (&list_ret, list_ret1);
18418 if (!list_ret)
18419 return 0;
18420 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18421 }
18422
18423 HOST_WIDE_INT value;
18424 if (bytepos.is_constant (&value) && value > 0)
18425 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18426 value, 0));
18427 else if (maybe_ne (bytepos, 0))
18428 loc_list_plus_const (list_ret, bytepos);
18429
18430 have_address = 1;
18431 break;
18432 }
18433
18434 case INTEGER_CST:
18435 if ((want_address || !tree_fits_shwi_p (loc))
18436 && (ret = cst_pool_loc_descr (loc)))
18437 have_address = 1;
18438 else if (want_address == 2
18439 && tree_fits_shwi_p (loc)
18440 && (ret = address_of_int_loc_descriptor
18441 (int_size_in_bytes (TREE_TYPE (loc)),
18442 tree_to_shwi (loc))))
18443 have_address = 1;
18444 else if (tree_fits_shwi_p (loc))
18445 ret = int_loc_descriptor (tree_to_shwi (loc));
18446 else if (tree_fits_uhwi_p (loc))
18447 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18448 else
18449 {
18450 expansion_failed (loc, NULL_RTX,
18451 "Integer operand is not host integer");
18452 return 0;
18453 }
18454 break;
18455
18456 case CONSTRUCTOR:
18457 case REAL_CST:
18458 case STRING_CST:
18459 case COMPLEX_CST:
18460 if ((ret = cst_pool_loc_descr (loc)))
18461 have_address = 1;
18462 else if (TREE_CODE (loc) == CONSTRUCTOR)
18463 {
18464 tree type = TREE_TYPE (loc);
18465 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18466 unsigned HOST_WIDE_INT offset = 0;
18467 unsigned HOST_WIDE_INT cnt;
18468 constructor_elt *ce;
18469
18470 if (TREE_CODE (type) == RECORD_TYPE)
18471 {
18472 /* This is very limited, but it's enough to output
18473 pointers to member functions, as long as the
18474 referenced function is defined in the current
18475 translation unit. */
18476 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18477 {
18478 tree val = ce->value;
18479
18480 tree field = ce->index;
18481
18482 if (val)
18483 STRIP_NOPS (val);
18484
18485 if (!field || DECL_BIT_FIELD (field))
18486 {
18487 expansion_failed (loc, NULL_RTX,
18488 "bitfield in record type constructor");
18489 size = offset = (unsigned HOST_WIDE_INT)-1;
18490 ret = NULL;
18491 break;
18492 }
18493
18494 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18495 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18496 gcc_assert (pos + fieldsize <= size);
18497 if (pos < offset)
18498 {
18499 expansion_failed (loc, NULL_RTX,
18500 "out-of-order fields in record constructor");
18501 size = offset = (unsigned HOST_WIDE_INT)-1;
18502 ret = NULL;
18503 break;
18504 }
18505 if (pos > offset)
18506 {
18507 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18508 add_loc_descr (&ret, ret1);
18509 offset = pos;
18510 }
18511 if (val && fieldsize != 0)
18512 {
18513 ret1 = loc_descriptor_from_tree (val, want_address, context);
18514 if (!ret1)
18515 {
18516 expansion_failed (loc, NULL_RTX,
18517 "unsupported expression in field");
18518 size = offset = (unsigned HOST_WIDE_INT)-1;
18519 ret = NULL;
18520 break;
18521 }
18522 add_loc_descr (&ret, ret1);
18523 }
18524 if (fieldsize)
18525 {
18526 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18527 add_loc_descr (&ret, ret1);
18528 offset = pos + fieldsize;
18529 }
18530 }
18531
18532 if (offset != size)
18533 {
18534 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18535 add_loc_descr (&ret, ret1);
18536 offset = size;
18537 }
18538
18539 have_address = !!want_address;
18540 }
18541 else
18542 expansion_failed (loc, NULL_RTX,
18543 "constructor of non-record type");
18544 }
18545 else
18546 /* We can construct small constants here using int_loc_descriptor. */
18547 expansion_failed (loc, NULL_RTX,
18548 "constructor or constant not in constant pool");
18549 break;
18550
18551 case TRUTH_AND_EXPR:
18552 case TRUTH_ANDIF_EXPR:
18553 case BIT_AND_EXPR:
18554 op = DW_OP_and;
18555 goto do_binop;
18556
18557 case TRUTH_XOR_EXPR:
18558 case BIT_XOR_EXPR:
18559 op = DW_OP_xor;
18560 goto do_binop;
18561
18562 case TRUTH_OR_EXPR:
18563 case TRUTH_ORIF_EXPR:
18564 case BIT_IOR_EXPR:
18565 op = DW_OP_or;
18566 goto do_binop;
18567
18568 case FLOOR_DIV_EXPR:
18569 case CEIL_DIV_EXPR:
18570 case ROUND_DIV_EXPR:
18571 case TRUNC_DIV_EXPR:
18572 case EXACT_DIV_EXPR:
18573 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18574 return 0;
18575 op = DW_OP_div;
18576 goto do_binop;
18577
18578 case MINUS_EXPR:
18579 op = DW_OP_minus;
18580 goto do_binop;
18581
18582 case FLOOR_MOD_EXPR:
18583 case CEIL_MOD_EXPR:
18584 case ROUND_MOD_EXPR:
18585 case TRUNC_MOD_EXPR:
18586 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18587 {
18588 op = DW_OP_mod;
18589 goto do_binop;
18590 }
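      /* For signed operands, compute the remainder by hand as
	 A - (A / B) * B using the signed DW_OP_div, as expanded below.  */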
18591 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18592 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18593 if (list_ret == 0 || list_ret1 == 0)
18594 return 0;
18595
18596 add_loc_list (&list_ret, list_ret1);
18597 if (list_ret == 0)
18598 return 0;
18599 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18600 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18601 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18602 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18603 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18604 break;
18605
18606 case MULT_EXPR:
18607 op = DW_OP_mul;
18608 goto do_binop;
18609
18610 case LSHIFT_EXPR:
18611 op = DW_OP_shl;
18612 goto do_binop;
18613
18614 case RSHIFT_EXPR:
18615 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18616 goto do_binop;
18617
18618 case POINTER_PLUS_EXPR:
18619 case PLUS_EXPR:
18620 do_plus:
18621 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18622 {
18623 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18624 smarter to encode their opposite. The DW_OP_plus_uconst operation
18625 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18626 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18627 bytes, Y being the size of the operation that pushes the opposite
18628 of the addend. So let's choose the smallest representation. */
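	  /* For instance (illustrative, with 64-bit DWARF addresses): an
	     addend of -1 needs a 10-byte ULEB128 operand for
	     DW_OP_plus_uconst, whereas pushing its opposite costs only
	     "DW_OP_lit1; DW_OP_minus", i.e. 2 bytes.  */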
18629 const tree tree_addend = TREE_OPERAND (loc, 1);
18630 offset_int wi_addend;
18631 HOST_WIDE_INT shwi_addend;
18632 dw_loc_descr_ref loc_naddend;
18633
18634 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18635 if (list_ret == 0)
18636 return 0;
18637
18638 /* Try to get the literal to push. It is the opposite of the addend,
18639 so as we rely on wrapping during DWARF evaluation, first decode
18640 the literal as a "DWARF-sized" signed number. */
18641 wi_addend = wi::to_offset (tree_addend);
18642 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18643 shwi_addend = wi_addend.to_shwi ();
18644 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18645 ? int_loc_descriptor (-shwi_addend)
18646 : NULL;
18647
18648 if (loc_naddend != NULL
18649 && ((unsigned) size_of_uleb128 (shwi_addend)
18650 > size_of_loc_descr (loc_naddend)))
18651 {
18652 add_loc_descr_to_each (list_ret, loc_naddend);
18653 add_loc_descr_to_each (list_ret,
18654 new_loc_descr (DW_OP_minus, 0, 0));
18655 }
18656 else
18657 {
18658 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18659 {
18660 loc_naddend = loc_cur;
18661 loc_cur = loc_cur->dw_loc_next;
18662 ggc_free (loc_naddend);
18663 }
18664 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18665 }
18666 break;
18667 }
18668
18669 op = DW_OP_plus;
18670 goto do_binop;
18671
18672 case LE_EXPR:
18673 op = DW_OP_le;
18674 goto do_comp_binop;
18675
18676 case GE_EXPR:
18677 op = DW_OP_ge;
18678 goto do_comp_binop;
18679
18680 case LT_EXPR:
18681 op = DW_OP_lt;
18682 goto do_comp_binop;
18683
18684 case GT_EXPR:
18685 op = DW_OP_gt;
18686 goto do_comp_binop;
18687
18688 do_comp_binop:
18689 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18690 {
18691 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18692 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18693 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18694 TREE_CODE (loc));
18695 break;
18696 }
18697 else
18698 goto do_binop;
18699
18700 case EQ_EXPR:
18701 op = DW_OP_eq;
18702 goto do_binop;
18703
18704 case NE_EXPR:
18705 op = DW_OP_ne;
18706 goto do_binop;
18707
18708 do_binop:
18709 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18710 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18711 if (list_ret == 0 || list_ret1 == 0)
18712 return 0;
18713
18714 add_loc_list (&list_ret, list_ret1);
18715 if (list_ret == 0)
18716 return 0;
18717 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18718 break;
18719
18720 case TRUTH_NOT_EXPR:
18721 case BIT_NOT_EXPR:
18722 op = DW_OP_not;
18723 goto do_unop;
18724
18725 case ABS_EXPR:
18726 op = DW_OP_abs;
18727 goto do_unop;
18728
18729 case NEGATE_EXPR:
18730 op = DW_OP_neg;
18731 goto do_unop;
18732
18733 do_unop:
18734 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18735 if (list_ret == 0)
18736 return 0;
18737
18738 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18739 break;
18740
18741 case MIN_EXPR:
18742 case MAX_EXPR:
18743 {
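	/* Lower MIN_EXPR/MAX_EXPR to a conditional: MIN (a, b) becomes
	   a > b ? b : a and MAX (a, b) becomes a < b ? b : a, then fall
	   through to the COND_EXPR handling below.  */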
18744 const enum tree_code code =
18745 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18746
18747 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18748 build2 (code, integer_type_node,
18749 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18750 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18751 }
18752
18753 /* fall through */
18754
18755 case COND_EXPR:
18756 {
18757 dw_loc_descr_ref lhs
18758 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18759 dw_loc_list_ref rhs
18760 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18761 dw_loc_descr_ref bra_node, jump_node, tmp;
18762
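	/* The expansion below lays out, roughly:
	     <cond>; DW_OP_bra .Lthen; <else value>; DW_OP_skip .Lend;
	     .Lthen: <then value>; .Lend: DW_OP_nop
	   where DW_OP_bra pops the condition and branches when it is
	   non-zero.  */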
18763 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18764 if (list_ret == 0 || lhs == 0 || rhs == 0)
18765 return 0;
18766
18767 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18768 add_loc_descr_to_each (list_ret, bra_node);
18769
18770 add_loc_list (&list_ret, rhs);
18771 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18772 add_loc_descr_to_each (list_ret, jump_node);
18773
18774 add_loc_descr_to_each (list_ret, lhs);
18775 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18776 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18777
18778 /* ??? Need a node to point the skip at. Use a nop. */
18779 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18780 add_loc_descr_to_each (list_ret, tmp);
18781 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18782 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18783 }
18784 break;
18785
18786 case FIX_TRUNC_EXPR:
18787 return 0;
18788
18789 default:
18790 /* Leave front-end specific codes as simply unknown. This comes
18791 up, for instance, with the C STMT_EXPR. */
18792 if ((unsigned int) TREE_CODE (loc)
18793 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18794 {
18795 expansion_failed (loc, NULL_RTX,
18796 "language specific tree node");
18797 return 0;
18798 }
18799
18800       /* Otherwise this is a generic code; we should just list all of
18801 these explicitly. We forgot one. */
18802 if (flag_checking)
18803 gcc_unreachable ();
18804
18805 /* In a release build, we want to degrade gracefully: better to
18806 generate incomplete debugging information than to crash. */
18807 return NULL;
18808 }
18809
18810 if (!ret && !list_ret)
18811 return 0;
18812
18813 if (want_address == 2 && !have_address
18814 && (dwarf_version >= 4 || !dwarf_strict))
18815 {
18816 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18817 {
18818 expansion_failed (loc, NULL_RTX,
18819 "DWARF address size mismatch");
18820 return 0;
18821 }
18822 if (ret)
18823 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18824 else
18825 add_loc_descr_to_each (list_ret,
18826 new_loc_descr (DW_OP_stack_value, 0, 0));
18827 have_address = 1;
18828 }
18829   /* Report failure if we can't fill the request for an address.  */
18830 if (want_address && !have_address)
18831 {
18832 expansion_failed (loc, NULL_RTX,
18833 "Want address and only have value");
18834 return 0;
18835 }
18836
18837 gcc_assert (!ret || !list_ret);
18838
18839 /* If we've got an address and don't want one, dereference. */
18840 if (!want_address && have_address)
18841 {
18842 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18843
18844 if (size > DWARF2_ADDR_SIZE || size == -1)
18845 {
18846 expansion_failed (loc, NULL_RTX,
18847 "DWARF address size mismatch");
18848 return 0;
18849 }
18850 else if (size == DWARF2_ADDR_SIZE)
18851 op = DW_OP_deref;
18852 else
18853 op = DW_OP_deref_size;
18854
18855 if (ret)
18856 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18857 else
18858 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18859 }
18860 if (ret)
18861 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18862
18863 return list_ret;
18864 }
18865
18866 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18867 expressions. */
18868
18869 static dw_loc_list_ref
18870 loc_list_from_tree (tree loc, int want_address,
18871 struct loc_descr_context *context)
18872 {
18873 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18874
18875 for (dw_loc_list_ref loc_cur = result;
18876 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18877 loc_descr_without_nops (loc_cur->expr);
18878 return result;
18879 }
18880
18881 /* Same as above but return only a single location expression.  */
18882 static dw_loc_descr_ref
18883 loc_descriptor_from_tree (tree loc, int want_address,
18884 struct loc_descr_context *context)
18885 {
18886 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18887 if (!ret)
18888 return NULL;
18889 if (ret->dw_loc_next)
18890 {
18891 expansion_failed (loc, NULL_RTX,
18892 "Location list where only loc descriptor needed");
18893 return NULL;
18894 }
18895 return ret->expr;
18896 }
18897
18898 /* Given a value, round it up to the lowest multiple of `boundary'
18899 which is not less than the value itself. */
18900
18901 static inline HOST_WIDE_INT
18902 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18903 {
18904 return (((value + boundary - 1) / boundary) * boundary);
18905 }
18906
18907 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18908 pointer to the declared type for the relevant field variable, or return
18909 `integer_type_node' if the given node turns out to be an
18910 ERROR_MARK node. */
18911
18912 static inline tree
18913 field_type (const_tree decl)
18914 {
18915 tree type;
18916
18917 if (TREE_CODE (decl) == ERROR_MARK)
18918 return integer_type_node;
18919
18920 type = DECL_BIT_FIELD_TYPE (decl);
18921 if (type == NULL_TREE)
18922 type = TREE_TYPE (decl);
18923
18924 return type;
18925 }
18926
18927 /* Given a pointer to a tree node, return the alignment in bits for
18928 it, or else return BITS_PER_WORD if the node actually turns out to
18929 be an ERROR_MARK node. */
18930
18931 static inline unsigned
18932 simple_type_align_in_bits (const_tree type)
18933 {
18934 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18935 }
18936
18937 static inline unsigned
18938 simple_decl_align_in_bits (const_tree decl)
18939 {
18940 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18941 }
18942
18943 /* Return the result of rounding T up to ALIGN. */
18944
18945 static inline offset_int
18946 round_up_to_align (const offset_int &t, unsigned int align)
18947 {
18948 return wi::udiv_trunc (t + align - 1, align) * align;
18949 }
18950
18951 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18952 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18953 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18954 if we fail to return the size in one of these two forms. */
18955
18956 static dw_loc_descr_ref
18957 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18958 {
18959 tree tree_size;
18960 struct loc_descr_context ctx;
18961
18962   /* By preference, return a constant integer if possible.  */
18963 *cst_size = int_size_in_bytes (type);
18964 if (*cst_size != -1)
18965 return NULL;
18966
18967 ctx.context_type = const_cast<tree> (type);
18968 ctx.base_decl = NULL_TREE;
18969 ctx.dpi = NULL;
18970 ctx.placeholder_arg = false;
18971 ctx.placeholder_seen = false;
18972
18973 type = TYPE_MAIN_VARIANT (type);
18974 tree_size = TYPE_SIZE_UNIT (type);
18975 return ((tree_size != NULL_TREE)
18976 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18977 : NULL);
18978 }
18979
18980 /* Helper structure for RECORD_TYPE processing. */
18981 struct vlr_context
18982 {
18983 /* Root RECORD_TYPE. It is needed to generate data member location
18984 descriptions in variable-length records (VLR), but also to cope with
18985 variants, which are composed of nested structures multiplexed with
18986 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18987      function processing a FIELD_DECL, it is required to be non-null.  */
18988 tree struct_type;
18989 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18990 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18991 this variant part as part of the root record (in storage units). For
18992 regular records, it must be NULL_TREE. */
18993 tree variant_part_offset;
18994 };
18995
18996 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18997 addressed byte of the "containing object" for the given FIELD_DECL. If
18998 possible, return a native constant through CST_OFFSET (in which case NULL is
18999 returned); otherwise return a DWARF expression that computes the offset.
19000
19001 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19002 that offset is, either because the argument turns out to be a pointer to an
19003 ERROR_MARK node, or because the offset expression is too complex for us.
19004
19005 CTX is required: see the comment for VLR_CONTEXT. */
19006
19007 static dw_loc_descr_ref
19008 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19009 HOST_WIDE_INT *cst_offset)
19010 {
19011 tree tree_result;
19012 dw_loc_list_ref loc_result;
19013
19014 *cst_offset = 0;
19015
19016 if (TREE_CODE (decl) == ERROR_MARK)
19017 return NULL;
19018 else
19019 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19020
19021 /* We cannot handle variable bit offsets at the moment, so abort if it's the
19022 case. */
19023 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19024 return NULL;
19025
19026 #ifdef PCC_BITFIELD_TYPE_MATTERS
19027   /* We used to handle only constant offsets in all cases.  Now we properly
19028      handle dynamic byte offsets only when the PCC bitfield type doesn't
19029      matter.  */
19030 if (PCC_BITFIELD_TYPE_MATTERS
19031 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19032 {
19033 offset_int object_offset_in_bits;
19034 offset_int object_offset_in_bytes;
19035 offset_int bitpos_int;
19036 tree type;
19037 tree field_size_tree;
19038 offset_int deepest_bitpos;
19039 offset_int field_size_in_bits;
19040 unsigned int type_align_in_bits;
19041 unsigned int decl_align_in_bits;
19042 offset_int type_size_in_bits;
19043
19044 bitpos_int = wi::to_offset (bit_position (decl));
19045 type = field_type (decl);
19046 type_size_in_bits = offset_int_type_size_in_bits (type);
19047 type_align_in_bits = simple_type_align_in_bits (type);
19048
19049 field_size_tree = DECL_SIZE (decl);
19050
19051 /* The size could be unspecified if there was an error, or for
19052 a flexible array member. */
19053 if (!field_size_tree)
19054 field_size_tree = bitsize_zero_node;
19055
19056 /* If the size of the field is not constant, use the type size. */
19057 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19058 field_size_in_bits = wi::to_offset (field_size_tree);
19059 else
19060 field_size_in_bits = type_size_in_bits;
19061
19062 decl_align_in_bits = simple_decl_align_in_bits (decl);
19063
19064 /* The GCC front-end doesn't make any attempt to keep track of the
19065 starting bit offset (relative to the start of the containing
19066 structure type) of the hypothetical "containing object" for a
19067 bit-field. Thus, when computing the byte offset value for the
19068 start of the "containing object" of a bit-field, we must deduce
19069 this information on our own. This can be rather tricky to do in
19070 some cases. For example, handling the following structure type
19071 definition when compiling for an i386/i486 target (which only
19072 aligns long long's to 32-bit boundaries) can be very tricky:
19073
19074 struct S { int field1; long long field2:31; };
19075
19076 Fortunately, there is a simple rule-of-thumb which can be used
19077 in such cases. When compiling for an i386/i486, GCC will
19078 allocate 8 bytes for the structure shown above. It decides to
19079 do this based upon one simple rule for bit-field allocation.
19080 GCC allocates each "containing object" for each bit-field at
19081 the first (i.e. lowest addressed) legitimate alignment boundary
19082 (based upon the required minimum alignment for the declared
19083 type of the field) which it can possibly use, subject to the
19084 condition that there is still enough available space remaining
19085 in the containing object (when allocated at the selected point)
19086 to fully accommodate all of the bits of the bit-field itself.
19087
19088 This simple rule makes it obvious why GCC allocates 8 bytes for
19089 each object of the structure type shown above. When looking
19090 for a place to allocate the "containing object" for `field2',
19091 the compiler simply tries to allocate a 64-bit "containing
19092 object" at each successive 32-bit boundary (starting at zero)
19093	 until it finds a place to allocate that 64-bit field such that
19094 at least 31 contiguous (and previously unallocated) bits remain
19095 within that selected 64 bit field. (As it turns out, for the
19096 example above, the compiler finds it is OK to allocate the
19097 "containing object" 64-bit field at bit-offset zero within the
19098 structure type.)
19099
19100 Here we attempt to work backwards from the limited set of facts
19101 we're given, and we try to deduce from those facts, where GCC
19102 must have believed that the containing object started (within
19103 the structure type). The value we deduce is then used (by the
19104 callers of this routine) to generate DW_AT_location and
19105 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19106 the case of DW_AT_location, regular fields as well). */
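      /* Working through the i386 example above with concrete numbers
	 (illustrative): field2 starts at bit 32 with a DECL_SIZE of 31 bits,
	 and its declared type (long long) has a 64-bit size but only a 32-bit
	 alignment there, so deepest_bitpos = 32 + 31 = 63, 63 - 64 = -1, and
	 rounding -1 up to the 32-bit type alignment yields an object offset
	 of 0 bits: the containing object starts at byte 0 of the
	 structure.  */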
19107
19108 /* Figure out the bit-distance from the start of the structure to
19109 the "deepest" bit of the bit-field. */
19110 deepest_bitpos = bitpos_int + field_size_in_bits;
19111
19112 /* This is the tricky part. Use some fancy footwork to deduce
19113 where the lowest addressed bit of the containing object must
19114 be. */
19115 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19116
19117 /* Round up to type_align by default. This works best for
19118 bitfields. */
19119 object_offset_in_bits
19120 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19121
19122 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19123 {
19124 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19125
19126 /* Round up to decl_align instead. */
19127 object_offset_in_bits
19128 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19129 }
19130
19131 object_offset_in_bytes
19132 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19133 if (ctx->variant_part_offset == NULL_TREE)
19134 {
19135 *cst_offset = object_offset_in_bytes.to_shwi ();
19136 return NULL;
19137 }
19138 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19139 }
19140 else
19141 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19142 tree_result = byte_position (decl);
19143
19144 if (ctx->variant_part_offset != NULL_TREE)
19145 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19146 ctx->variant_part_offset, tree_result);
19147
19148   /* If the byte offset is a constant, it's simpler to handle a native
19149 constant rather than a DWARF expression. */
19150 if (TREE_CODE (tree_result) == INTEGER_CST)
19151 {
19152 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19153 return NULL;
19154 }
19155 struct loc_descr_context loc_ctx = {
19156 ctx->struct_type, /* context_type */
19157 NULL_TREE, /* base_decl */
19158 NULL, /* dpi */
19159 false, /* placeholder_arg */
19160 false /* placeholder_seen */
19161 };
19162 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19163
19164 /* We want a DWARF expression: abort if we only have a location list with
19165 multiple elements. */
19166 if (!loc_result || !single_element_loc_list_p (loc_result))
19167 return NULL;
19168 else
19169 return loc_result->expr;
19170 }
19171 \f
19172 /* The following routines define various Dwarf attributes and any data
19173 associated with them. */
19174
19175 /* Add a location description attribute value to a DIE.
19176
19177 This emits location attributes suitable for whole variables and
19178 whole parameters. Note that the location attributes for struct fields are
19179 generated by the routine `data_member_location_attribute' below. */
19180
19181 static inline void
19182 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19183 dw_loc_list_ref descr)
19184 {
19185 bool check_no_locviews = true;
19186 if (descr == 0)
19187 return;
19188 if (single_element_loc_list_p (descr))
19189 add_AT_loc (die, attr_kind, descr->expr);
19190 else
19191 {
19192 add_AT_loc_list (die, attr_kind, descr);
19193 gcc_assert (descr->ll_symbol);
19194 if (attr_kind == DW_AT_location && descr->vl_symbol
19195 && dwarf2out_locviews_in_attribute ())
19196 {
19197 add_AT_view_list (die, DW_AT_GNU_locviews);
19198 check_no_locviews = false;
19199 }
19200 }
19201
19202 if (check_no_locviews)
19203 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19204 }
19205
19206 /* Add DW_AT_accessibility attribute to DIE if needed. */
19207
19208 static void
19209 add_accessibility_attribute (dw_die_ref die, tree decl)
19210 {
19211 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19212 children, otherwise the default is DW_ACCESS_public. In DWARF2
19213 the default has always been DW_ACCESS_public. */
19214 if (TREE_PROTECTED (decl))
19215 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19216 else if (TREE_PRIVATE (decl))
19217 {
19218 if (dwarf_version == 2
19219 || die->die_parent == NULL
19220 || die->die_parent->die_tag != DW_TAG_class_type)
19221 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19222 }
19223 else if (dwarf_version > 2
19224 && die->die_parent
19225 && die->die_parent->die_tag == DW_TAG_class_type)
19226 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19227 }
19228
19229 /* Attach the specialized form of location attribute used for data members of
19230 struct and union types. In the special case of a FIELD_DECL node which
19231 represents a bit-field, the "offset" part of this special location
19232 descriptor must indicate the distance in bytes from the lowest-addressed
19233 byte of the containing struct or union type to the lowest-addressed byte of
19234 the "containing object" for the bit-field. (See the `field_byte_offset'
19235 function above).
19236
19237 For any given bit-field, the "containing object" is a hypothetical object
19238 (of some integral or enum type) within which the given bit-field lives. The
19239 type of this hypothetical "containing object" is always the same as the
19240 declared type of the individual bit-field itself (for GCC anyway... the
19241 DWARF spec doesn't actually mandate this). Note that it is the size (in
19242 bytes) of the hypothetical "containing object" which will be given in the
19243 DW_AT_byte_size attribute for this bit-field. (See the
19244 `byte_size_attribute' function below.) It is also used when calculating the
19245 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19246 function below.)
19247
19248 CTX is required: see the comment for VLR_CONTEXT. */
19249
19250 static void
19251 add_data_member_location_attribute (dw_die_ref die,
19252 tree decl,
19253 struct vlr_context *ctx)
19254 {
19255 HOST_WIDE_INT offset;
19256 dw_loc_descr_ref loc_descr = 0;
19257
19258 if (TREE_CODE (decl) == TREE_BINFO)
19259 {
19260 /* We're working on the TAG_inheritance for a base class. */
19261 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19262 {
19263 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19264 aren't at a fixed offset from all (sub)objects of the same
19265 type. We need to extract the appropriate offset from our
19266 vtable. The following dwarf expression means
19267
19268 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19269
19270 This is specific to the V3 ABI, of course. */
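/* A sketch of the opcode sequence built below:
   DW_OP_dup; DW_OP_deref; DW_OP_lit/DW_OP_const (-Offset);
   DW_OP_minus; DW_OP_deref; DW_OP_plus. */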
19271
19272 dw_loc_descr_ref tmp;
19273
19274 /* Make a copy of the object address. */
19275 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19276 add_loc_descr (&loc_descr, tmp);
19277
19278 /* Extract the vtable address. */
19279 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19280 add_loc_descr (&loc_descr, tmp);
19281
19282 /* Calculate the address of the offset. */
19283 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19284 gcc_assert (offset < 0);
19285
19286 tmp = int_loc_descriptor (-offset);
19287 add_loc_descr (&loc_descr, tmp);
19288 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19289 add_loc_descr (&loc_descr, tmp);
19290
19291 /* Extract the offset. */
19292 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19293 add_loc_descr (&loc_descr, tmp);
19294
19295 /* Add it to the object address. */
19296 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19297 add_loc_descr (&loc_descr, tmp);
19298 }
19299 else
19300 offset = tree_to_shwi (BINFO_OFFSET (decl));
19301 }
19302 else
19303 {
19304 loc_descr = field_byte_offset (decl, ctx, &offset);
19305
19306 /* If loc_descr is available then we know the field offset is dynamic.
19307 However, GDB does not handle dynamic field offsets very well at the
19308 moment. */
19309 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19310 {
19311 loc_descr = NULL;
19312 offset = 0;
19313 }
19314
19315 /* Data member location evaluation starts with the base address on the
19316 stack. Compute the field offset and add it to this base address. */
19317 else if (loc_descr != NULL)
19318 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19319 }
19320
19321 if (! loc_descr)
19322 {
19323 /* While DW_AT_data_bit_offset was already added in DWARF4, GDB, for
19324 example, only gained support for it in November 2016. For DWARF5
19325 we need newer debug info consumers anyway. We might change this
19326 to dwarf_version >= 4 once most consumers have caught up. */
19327 if (dwarf_version >= 5
19328 && TREE_CODE (decl) == FIELD_DECL
19329 && DECL_BIT_FIELD_TYPE (decl))
19330 {
19331 tree off = bit_position (decl);
19332 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19333 {
19334 remove_AT (die, DW_AT_byte_size);
19335 remove_AT (die, DW_AT_bit_offset);
19336 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19337 return;
19338 }
19339 }
19340 if (dwarf_version > 2)
19341 {
19342 /* Don't need to output a location expression, just the constant. */
19343 if (offset < 0)
19344 add_AT_int (die, DW_AT_data_member_location, offset);
19345 else
19346 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19347 return;
19348 }
19349 else
19350 {
19351 enum dwarf_location_atom op;
19352
19353 /* The DWARF2 standard says that we should assume that the structure
19354 address is already on the stack, so we can specify a structure
19355 field address by using DW_OP_plus_uconst. */
19356 op = DW_OP_plus_uconst;
19357 loc_descr = new_loc_descr (op, offset, 0);
19358 }
19359 }
19360
19361 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19362 }
19363
19364 /* Writes integer values to dw_vec_const array. */
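/* (For instance, insert_int (0x0102, 2, dest) stores dest[0] == 0x02 and
   dest[1] == 0x01: bytes are written least-significant first, regardless
   of host byte order.) */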
19365
19366 static void
19367 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19368 {
19369 while (size != 0)
19370 {
19371 *dest++ = val & 0xff;
19372 val >>= 8;
19373 --size;
19374 }
19375 }
19376
19377 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19378
19379 static HOST_WIDE_INT
19380 extract_int (const unsigned char *src, unsigned int size)
19381 {
19382 HOST_WIDE_INT val = 0;
19383
19384 src += size;
19385 while (size != 0)
19386 {
19387 val <<= 8;
19388 val |= *--src & 0xff;
19389 --size;
19390 }
19391 return val;
19392 }
19393
19394 /* Writes wide_int values to dw_vec_const array. */
19395
19396 static void
19397 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19398 {
19399 int i;
19400
19401 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19402 {
19403 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19404 return;
19405 }
19406
19407 /* We'd have to extend this code to support odd sizes. */
19408 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19409
19410 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19411
19412 if (WORDS_BIG_ENDIAN)
19413 for (i = n - 1; i >= 0; i--)
19414 {
19415 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19416 dest += sizeof (HOST_WIDE_INT);
19417 }
19418 else
19419 for (i = 0; i < n; i++)
19420 {
19421 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19422 dest += sizeof (HOST_WIDE_INT);
19423 }
19424 }
19425
19426 /* Writes floating point values to dw_vec_const array. */
19427
19428 static void
19429 insert_float (const_rtx rtl, unsigned char *array)
19430 {
19431 long val[4];
19432 int i;
19433 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19434
19435 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19436
19437 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19438 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19439 {
19440 insert_int (val[i], 4, array);
19441 array += 4;
19442 }
19443 }
19444
19445 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19446 does not have a "location" either in memory or in a register. These
19447 things can arise in GNU C when a constant is passed as an actual parameter
19448 to an inlined function. They can also arise in C++ where declared
19449 constants do not necessarily get memory "homes". */
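/* (A sketch of the typical case: if "inline int f (int k) { return k + 1; }"
   is inlined for the call "f (42)" and K ends up with no location, the DIE
   for the inlined K can carry DW_AT_const_value 42 via the CONST_INT case
   below.) */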
19450
19451 static bool
19452 add_const_value_attribute (dw_die_ref die, rtx rtl)
19453 {
19454 switch (GET_CODE (rtl))
19455 {
19456 case CONST_INT:
19457 {
19458 HOST_WIDE_INT val = INTVAL (rtl);
19459
19460 if (val < 0)
19461 add_AT_int (die, DW_AT_const_value, val);
19462 else
19463 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19464 }
19465 return true;
19466
19467 case CONST_WIDE_INT:
19468 {
19469 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19470 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19471 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19472 wide_int w = wi::zext (w1, prec);
19473 add_AT_wide (die, DW_AT_const_value, w);
19474 }
19475 return true;
19476
19477 case CONST_DOUBLE:
19478 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19479 floating-point constant. A CONST_DOUBLE is used whenever the
19480 constant requires more than one word in order to be adequately
19481 represented. */
19482 if (TARGET_SUPPORTS_WIDE_INT == 0
19483 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19484 add_AT_double (die, DW_AT_const_value,
19485 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19486 else
19487 {
19488 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19489 unsigned int length = GET_MODE_SIZE (mode);
19490 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19491
19492 insert_float (rtl, array);
19493 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19494 }
19495 return true;
19496
19497 case CONST_VECTOR:
19498 {
19499 unsigned int length;
19500 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19501 return false;
19502
19503 machine_mode mode = GET_MODE (rtl);
19504 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19505 unsigned char *array
19506 = ggc_vec_alloc<unsigned char> (length * elt_size);
19507 unsigned int i;
19508 unsigned char *p;
19509 machine_mode imode = GET_MODE_INNER (mode);
19510
19511 switch (GET_MODE_CLASS (mode))
19512 {
19513 case MODE_VECTOR_INT:
19514 for (i = 0, p = array; i < length; i++, p += elt_size)
19515 {
19516 rtx elt = CONST_VECTOR_ELT (rtl, i);
19517 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19518 }
19519 break;
19520
19521 case MODE_VECTOR_FLOAT:
19522 for (i = 0, p = array; i < length; i++, p += elt_size)
19523 {
19524 rtx elt = CONST_VECTOR_ELT (rtl, i);
19525 insert_float (elt, p);
19526 }
19527 break;
19528
19529 default:
19530 gcc_unreachable ();
19531 }
19532
19533 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19534 }
19535 return true;
19536
19537 case CONST_STRING:
19538 if (dwarf_version >= 4 || !dwarf_strict)
19539 {
19540 dw_loc_descr_ref loc_result;
19541 resolve_one_addr (&rtl);
19542 rtl_addr:
19543 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19544 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19545 add_AT_loc (die, DW_AT_location, loc_result);
19546 vec_safe_push (used_rtx_array, rtl);
19547 return true;
19548 }
19549 return false;
19550
19551 case CONST:
19552 if (CONSTANT_P (XEXP (rtl, 0)))
19553 return add_const_value_attribute (die, XEXP (rtl, 0));
19554 /* FALLTHROUGH */
19555 case SYMBOL_REF:
19556 if (!const_ok_for_output (rtl))
19557 return false;
19558 /* FALLTHROUGH */
19559 case LABEL_REF:
19560 if (dwarf_version >= 4 || !dwarf_strict)
19561 goto rtl_addr;
19562 return false;
19563
19564 case PLUS:
19565 /* In cases where an inlined instance of an inline function is passed
19566 the address of an `auto' variable (which is local to the caller) we
19567 can get a situation where the DECL_RTL of the artificial local
19568 variable (for the inlining) which acts as a stand-in for the
19569 corresponding formal parameter (of the inline function) will look
19570 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19571 exactly a compile-time constant expression, but it isn't the address
19572 of the (artificial) local variable either. Rather, it represents the
19573 *value* which the artificial local variable always has during its
19574 lifetime. We currently have no way to represent such quasi-constant
19575 values in Dwarf, so for now we just punt and generate nothing. */
19576 return false;
19577
19578 case HIGH:
19579 case CONST_FIXED:
19580 return false;
19581
19582 case MEM:
19583 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19584 && MEM_READONLY_P (rtl)
19585 && GET_MODE (rtl) == BLKmode)
19586 {
19587 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19588 return true;
19589 }
19590 return false;
19591
19592 default:
19593 /* No other kinds of rtx should be possible here. */
19594 gcc_unreachable ();
19595 }
19596 return false;
19597 }
19598
19599 /* Determine whether the evaluation of EXPR references any variables
19600 or functions which aren't otherwise used (and therefore may not be
19601 output). */
19602 static tree
19603 reference_to_unused (tree * tp, int * walk_subtrees,
19604 void * data ATTRIBUTE_UNUSED)
19605 {
19606 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19607 *walk_subtrees = 0;
19608
19609 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19610 && ! TREE_ASM_WRITTEN (*tp))
19611 return *tp;
19612 /* ??? The C++ FE emits debug information for using decls, so
19613 putting gcc_unreachable here falls over. See PR31899. For now
19614 be conservative. */
19615 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19616 return *tp;
19617 else if (VAR_P (*tp))
19618 {
19619 varpool_node *node = varpool_node::get (*tp);
19620 if (!node || !node->definition)
19621 return *tp;
19622 }
19623 else if (TREE_CODE (*tp) == FUNCTION_DECL
19624 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19625 {
19626 /* The call graph machinery must have finished analyzing,
19627 optimizing and gimplifying the CU by now.
19628 So if *TP has no call graph node associated
19629 to it, it means *TP will not be emitted. */
19630 if (!cgraph_node::get (*tp))
19631 return *tp;
19632 }
19633 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19634 return *tp;
19635
19636 return NULL_TREE;
19637 }
19638
19639 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19640 for use in a later add_const_value_attribute call. */
19641
19642 static rtx
19643 rtl_for_decl_init (tree init, tree type)
19644 {
19645 rtx rtl = NULL_RTX;
19646
19647 STRIP_NOPS (init);
19648
19649 /* If a variable is initialized with a string constant without embedded
19650 zeros, build CONST_STRING. */
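/* (For instance, static char s[] = "abc"; -- domain 0 .. 3, string length 4
   counting the terminating NUL, no embedded zeros -- passes the checks below
   and becomes a read-only BLKmode MEM of a CONST_STRING.) */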
19651 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19652 {
19653 tree enttype = TREE_TYPE (type);
19654 tree domain = TYPE_DOMAIN (type);
19655 scalar_int_mode mode;
19656
19657 if (is_int_mode (TYPE_MODE (enttype), &mode)
19658 && GET_MODE_SIZE (mode) == 1
19659 && domain
19660 && TYPE_MAX_VALUE (domain)
19661 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19662 && integer_zerop (TYPE_MIN_VALUE (domain))
19663 && compare_tree_int (TYPE_MAX_VALUE (domain),
19664 TREE_STRING_LENGTH (init) - 1) == 0
19665 && ((size_t) TREE_STRING_LENGTH (init)
19666 == strlen (TREE_STRING_POINTER (init)) + 1))
19667 {
19668 rtl = gen_rtx_CONST_STRING (VOIDmode,
19669 ggc_strdup (TREE_STRING_POINTER (init)));
19670 rtl = gen_rtx_MEM (BLKmode, rtl);
19671 MEM_READONLY_P (rtl) = 1;
19672 }
19673 }
19674 /* Other aggregates, and complex values, could be represented using
19675 CONCAT: FIXME! */
19676 else if (AGGREGATE_TYPE_P (type)
19677 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19678 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19679 || TREE_CODE (type) == COMPLEX_TYPE)
19680 ;
19681 /* Vectors only work if their mode is supported by the target.
19682 FIXME: generic vectors ought to work too. */
19683 else if (TREE_CODE (type) == VECTOR_TYPE
19684 && !VECTOR_MODE_P (TYPE_MODE (type)))
19685 ;
19686 /* If the initializer is something that we know will expand into an
19687 immediate RTL constant, expand it now. We must be careful not to
19688 reference variables which won't be output. */
19689 else if (initializer_constant_valid_p (init, type)
19690 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19691 {
19692 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19693 possible. */
19694 if (TREE_CODE (type) == VECTOR_TYPE)
19695 switch (TREE_CODE (init))
19696 {
19697 case VECTOR_CST:
19698 break;
19699 case CONSTRUCTOR:
19700 if (TREE_CONSTANT (init))
19701 {
19702 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19703 bool constant_p = true;
19704 tree value;
19705 unsigned HOST_WIDE_INT ix;
19706
19707 /* Even when ctor is constant, it might contain non-*_CST
19708 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19709 belong into VECTOR_CST nodes. */
19710 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19711 if (!CONSTANT_CLASS_P (value))
19712 {
19713 constant_p = false;
19714 break;
19715 }
19716
19717 if (constant_p)
19718 {
19719 init = build_vector_from_ctor (type, elts);
19720 break;
19721 }
19722 }
19723 /* FALLTHRU */
19724
19725 default:
19726 return NULL;
19727 }
19728
19729 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19730
19731 /* If expand_expr returns a MEM, it wasn't immediate. */
19732 gcc_assert (!rtl || !MEM_P (rtl));
19733 }
19734
19735 return rtl;
19736 }
19737
19738 /* Generate RTL for the variable DECL to represent its location. */
19739
19740 static rtx
19741 rtl_for_decl_location (tree decl)
19742 {
19743 rtx rtl;
19744
19745 /* Here we have to decide where we are going to say the parameter "lives"
19746 (as far as the debugger is concerned). We only have a couple of
19747 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19748
19749 DECL_RTL normally indicates where the parameter lives during most of the
19750 activation of the function. If optimization is enabled however, this
19751 could be either NULL or else a pseudo-reg. Both of those cases indicate
19752 that the parameter doesn't really live anywhere (as far as the code
19753 generation parts of GCC are concerned) during most of the function's
19754 activation. That will happen (for example) if the parameter is never
19755 referenced within the function.
19756
19757 We could just generate a location descriptor here for all non-NULL
19758 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19759 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19760 where DECL_RTL is NULL or is a pseudo-reg.
19761
19762 Note however that we can only get away with using DECL_INCOMING_RTL as
19763 a backup substitute for DECL_RTL in certain limited cases. In cases
19764 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19765 we can be sure that the parameter was passed using the same type as it is
19766 declared to have within the function, and that its DECL_INCOMING_RTL
19767 points us to a place where a value of that type is passed.
19768
19769 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19770 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19771 because in these cases DECL_INCOMING_RTL points us to a value of some
19772 type which is *different* from the type of the parameter itself. Thus,
19773 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19774 such cases, the debugger would end up (for example) trying to fetch a
19775 `float' from a place which actually contains the first part of a
19776 `double'. That would lead to really incorrect and confusing
19777 output at debug-time.
19778
19779 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19780 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19781 are a couple of exceptions however. On little-endian machines we can
19782 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19783 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19784 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19785 when (on a little-endian machine) a non-prototyped function has a
19786 parameter declared to be of type `short' or `char'. In such cases,
19787 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19788 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19789 passed `int' value. If the debugger then uses that address to fetch
19790 a `short' or a `char' (on a little-endian machine) the result will be
19791 the correct data, so we allow for such exceptional cases below.
19792
19793 Note that our goal here is to describe the place where the given formal
19794 parameter lives during most of the function's activation (i.e. between the
19795 end of the prologue and the start of the epilogue). We'll do that as best
19796 as we can. Note however that if the given formal parameter is modified
19797 sometime during the execution of the function, then a stack backtrace (at
19798 debug-time) will show the function as having been called with the *new*
19799 value rather than the value which was originally passed in. This happens
19800 rarely enough that it is not a major problem, but it *is* a problem, and
19801 I'd like to fix it.
19802
19803 A future version of dwarf2out.c may generate two additional attributes for
19804 any given DW_TAG_formal_parameter DIE which will describe the "passed
19805 type" and the "passed location" for the given formal parameter in addition
19806 to the attributes we now generate to indicate the "declared type" and the
19807 "active location" for each parameter. This additional set of attributes
19808 could be used by debuggers for stack backtraces. Separately, note that
19809 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19810 This happens (for example) for inlined-instances of inline function formal
19811 parameters which are never referenced. This really shouldn't be
19812 happening. All PARM_DECL nodes should get valid non-NULL
19813 DECL_INCOMING_RTL values. FIXME. */
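/* A sketch of the little-endian exception described above:

     old_style (c)
          char c;
     { ... }

   Here TREE_TYPE (c) is char while DECL_ARG_TYPE (c) is int; on a
   little-endian machine the lowest-addressed byte of the passed int holds
   the char value, so DECL_INCOMING_RTL is still a usable location. */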
19814
19815 /* Use DECL_RTL as the "location" unless we find something better. */
19816 rtl = DECL_RTL_IF_SET (decl);
19817
19818 /* When generating abstract instances, ignore everything except
19819 constants, symbols living in memory, and symbols living in
19820 fixed registers. */
19821 if (! reload_completed)
19822 {
19823 if (rtl
19824 && (CONSTANT_P (rtl)
19825 || (MEM_P (rtl)
19826 && CONSTANT_P (XEXP (rtl, 0)))
19827 || (REG_P (rtl)
19828 && VAR_P (decl)
19829 && TREE_STATIC (decl))))
19830 {
19831 rtl = targetm.delegitimize_address (rtl);
19832 return rtl;
19833 }
19834 rtl = NULL_RTX;
19835 }
19836 else if (TREE_CODE (decl) == PARM_DECL)
19837 {
19838 if (rtl == NULL_RTX
19839 || is_pseudo_reg (rtl)
19840 || (MEM_P (rtl)
19841 && is_pseudo_reg (XEXP (rtl, 0))
19842 && DECL_INCOMING_RTL (decl)
19843 && MEM_P (DECL_INCOMING_RTL (decl))
19844 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19845 {
19846 tree declared_type = TREE_TYPE (decl);
19847 tree passed_type = DECL_ARG_TYPE (decl);
19848 machine_mode dmode = TYPE_MODE (declared_type);
19849 machine_mode pmode = TYPE_MODE (passed_type);
19850
19851 /* This decl represents a formal parameter which was optimized out.
19852 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19853 all cases where (rtl == NULL_RTX) just below. */
19854 if (dmode == pmode)
19855 rtl = DECL_INCOMING_RTL (decl);
19856 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19857 && SCALAR_INT_MODE_P (dmode)
19858 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19859 && DECL_INCOMING_RTL (decl))
19860 {
19861 rtx inc = DECL_INCOMING_RTL (decl);
19862 if (REG_P (inc))
19863 rtl = inc;
19864 else if (MEM_P (inc))
19865 {
19866 if (BYTES_BIG_ENDIAN)
19867 rtl = adjust_address_nv (inc, dmode,
19868 GET_MODE_SIZE (pmode)
19869 - GET_MODE_SIZE (dmode));
19870 else
19871 rtl = inc;
19872 }
19873 }
19874 }
19875
19876 /* If the parm was passed in registers, but lives on the stack, then
19877 make a big endian correction if the mode of the type of the
19878 parameter is not the same as the mode of the rtl. */
19879 /* ??? This is the same series of checks that are made in dbxout.c before
19880 we reach the big endian correction code there. It isn't clear if all
19881 of these checks are necessary here, but keeping them all is the safe
19882 thing to do. */
19883 else if (MEM_P (rtl)
19884 && XEXP (rtl, 0) != const0_rtx
19885 && ! CONSTANT_P (XEXP (rtl, 0))
19886 /* Not passed in memory. */
19887 && !MEM_P (DECL_INCOMING_RTL (decl))
19888 /* Not passed by invisible reference. */
19889 && (!REG_P (XEXP (rtl, 0))
19890 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19891 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19892 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19893 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19894 #endif
19895 )
19896 /* Big endian correction check. */
19897 && BYTES_BIG_ENDIAN
19898 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19899 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19900 UNITS_PER_WORD))
19901 {
19902 machine_mode addr_mode = get_address_mode (rtl);
19903 poly_int64 offset = (UNITS_PER_WORD
19904 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19905
19906 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19907 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19908 }
19909 }
19910 else if (VAR_P (decl)
19911 && rtl
19912 && MEM_P (rtl)
19913 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19914 {
19915 machine_mode addr_mode = get_address_mode (rtl);
19916 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19917 GET_MODE (rtl));
19918
19919 /* If a variable is declared "register" yet is smaller than
19920 a register, then if we store the variable to memory, it
19921 looks like we're storing a register-sized value, when in
19922 fact we are not. We need to adjust the offset of the
19923 storage location to reflect the actual value's bytes,
19924 else gdb will not be able to display it. */
19925 if (maybe_ne (offset, 0))
19926 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19927 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19928 }
19929
19930 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19931 and will have been substituted directly into all expressions that use it.
19932 C does not have such a concept, but C++ and other languages do. */
19933 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19934 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19935
19936 if (rtl)
19937 rtl = targetm.delegitimize_address (rtl);
19938
19939 /* If we don't look past the constant pool, we risk emitting a
19940 reference to a constant pool entry that isn't referenced from
19941 code, and thus is not emitted. */
19942 if (rtl)
19943 rtl = avoid_constant_pool_reference (rtl);
19944
19945 /* Try harder to get a rtl. If this symbol ends up not being emitted
19946 in the current CU, resolve_addr will remove the expression referencing
19947 it. */
19948 if (rtl == NULL_RTX
19949 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19950 && VAR_P (decl)
19951 && !DECL_EXTERNAL (decl)
19952 && TREE_STATIC (decl)
19953 && DECL_NAME (decl)
19954 && !DECL_HARD_REGISTER (decl)
19955 && DECL_MODE (decl) != VOIDmode)
19956 {
19957 rtl = make_decl_rtl_for_debug (decl);
19958 if (!MEM_P (rtl)
19959 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19960 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19961 rtl = NULL_RTX;
19962 }
19963
19964 return rtl;
19965 }
19966
19967 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19968 returned. If so, the decl for the COMMON block is returned, and the
19969 value is the offset into the common block for the symbol. */
19970
19971 static tree
19972 fortran_common (tree decl, HOST_WIDE_INT *value)
19973 {
19974 tree val_expr, cvar;
19975 machine_mode mode;
19976 poly_int64 bitsize, bitpos;
19977 tree offset;
19978 HOST_WIDE_INT cbitpos;
19979 int unsignedp, reversep, volatilep = 0;
19980
19981 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19982 it does not have a value (the offset into the common area), or if it
19983 is thread local (as opposed to global) then it isn't common, and shouldn't
19984 be handled as such. */
19985 if (!VAR_P (decl)
19986 || !TREE_STATIC (decl)
19987 || !DECL_HAS_VALUE_EXPR_P (decl)
19988 || !is_fortran ())
19989 return NULL_TREE;
19990
19991 val_expr = DECL_VALUE_EXPR (decl);
19992 if (TREE_CODE (val_expr) != COMPONENT_REF)
19993 return NULL_TREE;
19994
19995 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19996 &unsignedp, &reversep, &volatilep);
19997
19998 if (cvar == NULL_TREE
19999 || !VAR_P (cvar)
20000 || DECL_ARTIFICIAL (cvar)
20001 || !TREE_PUBLIC (cvar)
20002 /* We don't expect to have to cope with variable offsets,
20003 since at present all static data must have a constant size. */
20004 || !bitpos.is_constant (&cbitpos))
20005 return NULL_TREE;
20006
20007 *value = 0;
20008 if (offset != NULL)
20009 {
20010 if (!tree_fits_shwi_p (offset))
20011 return NULL_TREE;
20012 *value = tree_to_shwi (offset);
20013 }
20014 if (cbitpos != 0)
20015 *value += cbitpos / BITS_PER_UNIT;
20016
20017 return cvar;
20018 }
20019
20020 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20021 data attribute for a variable or a parameter. We generate the
20022 DW_AT_const_value attribute only in those cases where the given variable
20023 or parameter does not have a true "location" either in memory or in a
20024 register. This can happen (for example) when a constant is passed as an
20025 actual argument in a call to an inline function. (It's possible that
20026 these things can crop up in other ways also.) Note that one type of
20027 constant value which can be passed into an inlined function is a constant
20028 pointer. This can happen for example if an actual argument in an inlined
20029 function call evaluates to a compile-time constant address.
20030
20031 CACHE_P is true if it is worth caching the location list for DECL,
20032 so that future calls can reuse it rather than regenerate it from scratch.
20033 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20034 since we will need to refer to them each time the function is inlined. */
20035
20036 static bool
20037 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20038 {
20039 rtx rtl;
20040 dw_loc_list_ref list;
20041 var_loc_list *loc_list;
20042 cached_dw_loc_list *cache;
20043
20044 if (early_dwarf)
20045 return false;
20046
20047 if (TREE_CODE (decl) == ERROR_MARK)
20048 return false;
20049
20050 if (get_AT (die, DW_AT_location)
20051 || get_AT (die, DW_AT_const_value))
20052 return true;
20053
20054 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20055 || TREE_CODE (decl) == RESULT_DECL);
20056
20057 /* Try to get some constant RTL for this decl, and use that as the value of
20058 the location. */
20059
20060 rtl = rtl_for_decl_location (decl);
20061 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20062 && add_const_value_attribute (die, rtl))
20063 return true;
20064
20065 /* See if we have a single element location list that is equivalent to
20066 a constant value. That way it is better to use add_const_value_attribute
20067 rather than expanding the constant value equivalent. */
20068 loc_list = lookup_decl_loc (decl);
20069 if (loc_list
20070 && loc_list->first
20071 && loc_list->first->next == NULL
20072 && NOTE_P (loc_list->first->loc)
20073 && NOTE_VAR_LOCATION (loc_list->first->loc)
20074 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20075 {
20076 struct var_loc_node *node;
20077
20078 node = loc_list->first;
20079 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20080 if (GET_CODE (rtl) == EXPR_LIST)
20081 rtl = XEXP (rtl, 0);
20082 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20083 && add_const_value_attribute (die, rtl))
20084 return true;
20085 }
20086 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20087 list several times. See if we've already cached the contents. */
20088 list = NULL;
20089 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20090 cache_p = false;
20091 if (cache_p)
20092 {
20093 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20094 if (cache)
20095 list = cache->loc_list;
20096 }
20097 if (list == NULL)
20098 {
20099 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20100 NULL);
20101 /* It is usually worth caching this result if the decl is from
20102 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20103 if (cache_p && list && list->dw_loc_next)
20104 {
20105 cached_dw_loc_list **slot
20106 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20107 DECL_UID (decl),
20108 INSERT);
20109 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20110 cache->decl_id = DECL_UID (decl);
20111 cache->loc_list = list;
20112 *slot = cache;
20113 }
20114 }
20115 if (list)
20116 {
20117 add_AT_location_description (die, DW_AT_location, list);
20118 return true;
20119 }
20120 /* None of that worked, so it must not really have a location;
20121 try adding a constant value attribute from the DECL_INITIAL. */
20122 return tree_add_const_value_attribute_for_decl (die, decl);
20123 }
20124
20125 /* Helper function for tree_add_const_value_attribute. Natively encode
20126 initializer INIT into an array. Return true if successful. */
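/* (A sketch, assuming a little-endian target with 32-bit int: for
   "int v[2] = { 1, 2 };" and SIZE == 8, the array is filled with the
   bytes 01 00 00 00 02 00 00 00.) */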
20127
20128 static bool
20129 native_encode_initializer (tree init, unsigned char *array, int size)
20130 {
20131 tree type;
20132
20133 if (init == NULL_TREE)
20134 return false;
20135
20136 STRIP_NOPS (init);
20137 switch (TREE_CODE (init))
20138 {
20139 case STRING_CST:
20140 type = TREE_TYPE (init);
20141 if (TREE_CODE (type) == ARRAY_TYPE)
20142 {
20143 tree enttype = TREE_TYPE (type);
20144 scalar_int_mode mode;
20145
20146 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20147 || GET_MODE_SIZE (mode) != 1)
20148 return false;
20149 if (int_size_in_bytes (type) != size)
20150 return false;
20151 if (size > TREE_STRING_LENGTH (init))
20152 {
20153 memcpy (array, TREE_STRING_POINTER (init),
20154 TREE_STRING_LENGTH (init));
20155 memset (array + TREE_STRING_LENGTH (init),
20156 '\0', size - TREE_STRING_LENGTH (init));
20157 }
20158 else
20159 memcpy (array, TREE_STRING_POINTER (init), size);
20160 return true;
20161 }
20162 return false;
20163 case CONSTRUCTOR:
20164 type = TREE_TYPE (init);
20165 if (int_size_in_bytes (type) != size)
20166 return false;
20167 if (TREE_CODE (type) == ARRAY_TYPE)
20168 {
20169 HOST_WIDE_INT min_index;
20170 unsigned HOST_WIDE_INT cnt;
20171 int curpos = 0, fieldsize;
20172 constructor_elt *ce;
20173
20174 if (TYPE_DOMAIN (type) == NULL_TREE
20175 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20176 return false;
20177
20178 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20179 if (fieldsize <= 0)
20180 return false;
20181
20182 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20183 memset (array, '\0', size);
20184 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20185 {
20186 tree val = ce->value;
20187 tree index = ce->index;
20188 int pos = curpos;
20189 if (index && TREE_CODE (index) == RANGE_EXPR)
20190 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20191 * fieldsize;
20192 else if (index)
20193 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20194
20195 if (val)
20196 {
20197 STRIP_NOPS (val);
20198 if (!native_encode_initializer (val, array + pos, fieldsize))
20199 return false;
20200 }
20201 curpos = pos + fieldsize;
20202 if (index && TREE_CODE (index) == RANGE_EXPR)
20203 {
20204 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20205 - tree_to_shwi (TREE_OPERAND (index, 0));
20206 while (count-- > 0)
20207 {
20208 if (val)
20209 memcpy (array + curpos, array + pos, fieldsize);
20210 curpos += fieldsize;
20211 }
20212 }
20213 gcc_assert (curpos <= size);
20214 }
20215 return true;
20216 }
20217 else if (TREE_CODE (type) == RECORD_TYPE
20218 || TREE_CODE (type) == UNION_TYPE)
20219 {
20220 tree field = NULL_TREE;
20221 unsigned HOST_WIDE_INT cnt;
20222 constructor_elt *ce;
20223
20224 if (int_size_in_bytes (type) != size)
20225 return false;
20226
20227 if (TREE_CODE (type) == RECORD_TYPE)
20228 field = TYPE_FIELDS (type);
20229
20230 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20231 {
20232 tree val = ce->value;
20233 int pos, fieldsize;
20234
20235 if (ce->index != 0)
20236 field = ce->index;
20237
20238 if (val)
20239 STRIP_NOPS (val);
20240
20241 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20242 return false;
20243
20244 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20245 && TYPE_DOMAIN (TREE_TYPE (field))
20246 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20247 return false;
20248 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20249 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20250 return false;
20251 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20252 pos = int_byte_position (field);
20253 gcc_assert (pos + fieldsize <= size);
20254 if (val && fieldsize != 0
20255 && !native_encode_initializer (val, array + pos, fieldsize))
20256 return false;
20257 }
20258 return true;
20259 }
20260 return false;
20261 case VIEW_CONVERT_EXPR:
20262 case NON_LVALUE_EXPR:
20263 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20264 default:
20265 return native_encode_expr (init, array, size) == size;
20266 }
20267 }
20268
20269 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20270 attribute is the const value T. */
20271
20272 static bool
20273 tree_add_const_value_attribute (dw_die_ref die, tree t)
20274 {
20275 tree init;
20276 tree type = TREE_TYPE (t);
20277 rtx rtl;
20278
20279 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20280 return false;
20281
20282 init = t;
20283 gcc_assert (!DECL_P (init));
20284
20285 if (TREE_CODE (init) == INTEGER_CST)
20286 {
20287 if (tree_fits_uhwi_p (init))
20288 {
20289 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20290 return true;
20291 }
20292 if (tree_fits_shwi_p (init))
20293 {
20294 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20295 return true;
20296 }
20297 }
20298 if (! early_dwarf)
20299 {
20300 rtl = rtl_for_decl_init (init, type);
20301 if (rtl)
20302 return add_const_value_attribute (die, rtl);
20303 }
20304 /* If the host and target are sane, try harder. */
20305 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20306 && initializer_constant_valid_p (init, type))
20307 {
20308 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20309 if (size > 0 && (int) size == size)
20310 {
20311 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20312
20313 if (native_encode_initializer (init, array, size))
20314 {
20315 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20316 return true;
20317 }
20318 ggc_free (array);
20319 }
20320 }
20321 return false;
20322 }
20323
20324 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20325 attribute is the const value of T, where T is an integral constant
20326 variable with static storage duration
20327 (so it can't be a PARM_DECL or a RESULT_DECL). */
20328
20329 static bool
20330 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20331 {
20332
20333 if (!decl
20334 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20335 || (VAR_P (decl) && !TREE_STATIC (decl)))
20336 return false;
20337
20338 if (TREE_READONLY (decl)
20339 && ! TREE_THIS_VOLATILE (decl)
20340 && DECL_INITIAL (decl))
20341 /* OK */;
20342 else
20343 return false;
20344
20345 /* Don't add DW_AT_const_value if abstract origin already has one. */
20346 if (get_AT (var_die, DW_AT_const_value))
20347 return false;
20348
20349 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20350 }
20351
20352 /* Convert the CFI instructions for the current function into a
20353 location list. This is used for DW_AT_frame_base when we are targeting
20354 a dwarf2 consumer that does not support the dwarf3
20355 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20356 expressions. */
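/* (A sketch of the result, assuming an x86-64 target whose CFA is
   %rsp + 8 at the start of the function and %rsp + 16 after the prologue:
   the list would contain two entries, roughly

     [begin, after-prologue) DW_OP_breg7 (rsp): 8 + OFFSET
     [after-prologue, end) DW_OP_breg7 (rsp): 16 + OFFSET

   one entry per region over which the CFA formula stays constant.) */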
20357
20358 static dw_loc_list_ref
20359 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20360 {
20361 int ix;
20362 dw_fde_ref fde;
20363 dw_loc_list_ref list, *list_tail;
20364 dw_cfi_ref cfi;
20365 dw_cfa_location last_cfa, next_cfa;
20366 const char *start_label, *last_label, *section;
20367 dw_cfa_location remember;
20368
20369 fde = cfun->fde;
20370 gcc_assert (fde != NULL);
20371
20372 section = secname_for_decl (current_function_decl);
20373 list_tail = &list;
20374 list = NULL;
20375
20376 memset (&next_cfa, 0, sizeof (next_cfa));
20377 next_cfa.reg = INVALID_REGNUM;
20378 remember = next_cfa;
20379
20380 start_label = fde->dw_fde_begin;
20381
20382 /* ??? Bald assumption that the CIE opcode list does not contain
20383 advance opcodes. */
20384 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20385 lookup_cfa_1 (cfi, &next_cfa, &remember);
20386
20387 last_cfa = next_cfa;
20388 last_label = start_label;
20389
20390 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20391 {
20392 /* If the first partition contained no CFI adjustments, the
20393 CIE opcodes apply to the whole first partition. */
20394 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20395 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20396 list_tail = &(*list_tail)->dw_loc_next;
20397 start_label = last_label = fde->dw_fde_second_begin;
20398 }
20399
20400 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20401 {
20402 switch (cfi->dw_cfi_opc)
20403 {
20404 case DW_CFA_set_loc:
20405 case DW_CFA_advance_loc1:
20406 case DW_CFA_advance_loc2:
20407 case DW_CFA_advance_loc4:
20408 if (!cfa_equal_p (&last_cfa, &next_cfa))
20409 {
20410 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20411 start_label, 0, last_label, 0, section);
20412
20413 list_tail = &(*list_tail)->dw_loc_next;
20414 last_cfa = next_cfa;
20415 start_label = last_label;
20416 }
20417 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20418 break;
20419
20420 case DW_CFA_advance_loc:
20421 /* The encoding is complex enough that we should never emit this. */
20422 gcc_unreachable ();
20423
20424 default:
20425 lookup_cfa_1 (cfi, &next_cfa, &remember);
20426 break;
20427 }
20428 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20429 {
20430 if (!cfa_equal_p (&last_cfa, &next_cfa))
20431 {
20432 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20433 start_label, 0, last_label, 0, section);
20434
20435 list_tail = &(*list_tail)->dw_loc_next;
20436 last_cfa = next_cfa;
20437 start_label = last_label;
20438 }
20439 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20440 start_label, 0, fde->dw_fde_end, 0, section);
20441 list_tail = &(*list_tail)->dw_loc_next;
20442 start_label = last_label = fde->dw_fde_second_begin;
20443 }
20444 }
20445
20446 if (!cfa_equal_p (&last_cfa, &next_cfa))
20447 {
20448 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20449 start_label, 0, last_label, 0, section);
20450 list_tail = &(*list_tail)->dw_loc_next;
20451 start_label = last_label;
20452 }
20453
20454 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20455 start_label, 0,
20456 fde->dw_fde_second_begin
20457 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20458 section);
20459
20460 maybe_gen_llsym (list);
20461
20462 return list;
20463 }
20464
20465 /* Compute a displacement from the "steady-state frame pointer" to the
20466 frame base (often the same as the CFA), and store it in
20467 frame_pointer_fb_offset. OFFSET is added to the displacement
20468 before the latter is negated. */
20469
20470 static void
20471 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20472 {
20473 rtx reg, elim;
20474
20475 #ifdef FRAME_POINTER_CFA_OFFSET
20476 reg = frame_pointer_rtx;
20477 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20478 #else
20479 reg = arg_pointer_rtx;
20480 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20481 #endif
20482
20483 elim = (ira_use_lra_p
20484 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20485 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20486 elim = strip_offset_and_add (elim, &offset);
20487
20488 frame_pointer_fb_offset = -offset;
20489
20490 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20491 in which to eliminate. This is because its stack pointer isn't
20492 directly accessible as a register within the ISA. To work around
20493 this, assume that while we cannot provide a proper value for
20494 frame_pointer_fb_offset, we won't need one either. */
20495 frame_pointer_fb_offset_valid
20496 = ((SUPPORTS_STACK_ALIGNMENT
20497 && (elim == hard_frame_pointer_rtx
20498 || elim == stack_pointer_rtx))
20499 || elim == (frame_pointer_needed
20500 ? hard_frame_pointer_rtx
20501 : stack_pointer_rtx));
20502 }
20503
20504 /* Generate a DW_AT_name attribute given some string value to be included as
20505 the value of the attribute. */
20506
20507 static void
20508 add_name_attribute (dw_die_ref die, const char *name_string)
20509 {
20510 if (name_string != NULL && *name_string != 0)
20511 {
20512 if (demangle_name_func)
20513 name_string = (*demangle_name_func) (name_string);
20514
20515 add_AT_string (die, DW_AT_name, name_string);
20516 }
20517 }
20518
20519 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20520 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20521 of TYPE accordingly.
20522
20523 ??? This is a temporary measure until after we're able to generate
20524 regular DWARF for the complex Ada type system. */
20525
20526 static void
20527 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20528 dw_die_ref context_die)
20529 {
20530 tree dtype;
20531 dw_die_ref dtype_die;
20532
20533 if (!lang_hooks.types.descriptive_type)
20534 return;
20535
20536 dtype = lang_hooks.types.descriptive_type (type);
20537 if (!dtype)
20538 return;
20539
20540 dtype_die = lookup_type_die (dtype);
20541 if (!dtype_die)
20542 {
20543 gen_type_die (dtype, context_die);
20544 dtype_die = lookup_type_die (dtype);
20545 gcc_assert (dtype_die);
20546 }
20547
20548 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20549 }
20550
20551 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20552
20553 static const char *
20554 comp_dir_string (void)
20555 {
20556 const char *wd;
20557 char *wd1;
20558 static const char *cached_wd = NULL;
20559
20560 if (cached_wd != NULL)
20561 return cached_wd;
20562
20563 wd = get_src_pwd ();
20564 if (wd == NULL)
20565 return NULL;
20566
20567 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20568 {
20569 int wdlen;
20570
20571 wdlen = strlen (wd);
20572 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20573 strcpy (wd1, wd);
20574 wd1 [wdlen] = DIR_SEPARATOR;
20575 wd1 [wdlen + 1] = 0;
20576 wd = wd1;
20577 }
20578
20579 cached_wd = remap_debug_filename (wd);
20580 return cached_wd;
20581 }
20582
20583 /* Generate a DW_AT_comp_dir attribute for DIE. */
20584
20585 static void
20586 add_comp_dir_attribute (dw_die_ref die)
20587 {
20588 const char * wd = comp_dir_string ();
20589 if (wd != NULL)
20590 add_AT_string (die, DW_AT_comp_dir, wd);
20591 }
20592
20593 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20594 pointer computation, ...), output a representation for that bound according
20595 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20596 loc_list_from_tree for the meaning of CONTEXT. */
20597
20598 static void
20599 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20600 int forms, struct loc_descr_context *context)
20601 {
20602 dw_die_ref context_die, decl_die;
20603 dw_loc_list_ref list;
20604 bool strip_conversions = true;
20605 bool placeholder_seen = false;
20606
20607 while (strip_conversions)
20608 switch (TREE_CODE (value))
20609 {
20610 case ERROR_MARK:
20611 case SAVE_EXPR:
20612 return;
20613
20614 CASE_CONVERT:
20615 case VIEW_CONVERT_EXPR:
20616 value = TREE_OPERAND (value, 0);
20617 break;
20618
20619 default:
20620 strip_conversions = false;
20621 break;
20622 }
20623
20624 /* If possible and permitted, output the attribute as a constant. */
20625 if ((forms & dw_scalar_form_constant) != 0
20626 && TREE_CODE (value) == INTEGER_CST)
20627 {
20628 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20629
20630 /* If HOST_WIDE_INT is big enough then represent the bound as
20631 a constant value. We need to choose a form based on
20632 whether the type is signed or unsigned. We cannot just
20633 call add_AT_unsigned if the value itself is positive
20634 (add_AT_unsigned might add the unsigned value encoded as
20635 DW_FORM_data[1248]). Some DWARF consumers will look up the
20636 bounds type and then sign extend any unsigned values found
20637 for signed types. This is needed only for
20638 DW_AT_{lower,upper}_bound, since for most other attributes,
20639 consumers will treat DW_FORM_data[1248] as unsigned values,
20640 regardless of the underlying type. */
20641 if (prec <= HOST_BITS_PER_WIDE_INT
20642 || tree_fits_uhwi_p (value))
20643 {
20644 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20645 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20646 else
20647 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20648 }
20649 else
20650 /* Otherwise represent the bound as an unsigned value with
20651 the precision of its type. The precision and signedness
20652 of the type will be necessary to re-interpret it
20653 unambiguously. */
20654 add_AT_wide (die, attr, wi::to_wide (value));
20655 return;
20656 }
20657
20658 /* Otherwise, if it's possible and permitted too, output a reference to
20659 another DIE. */
20660 if ((forms & dw_scalar_form_reference) != 0)
20661 {
20662 tree decl = NULL_TREE;
20663
20664 /* Some type attributes reference an outer type. For instance, the upper
20665 bound of an array may reference an embedding record (this happens in
20666 Ada). */
20667 if (TREE_CODE (value) == COMPONENT_REF
20668 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20669 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20670 decl = TREE_OPERAND (value, 1);
20671
20672 else if (VAR_P (value)
20673 || TREE_CODE (value) == PARM_DECL
20674 || TREE_CODE (value) == RESULT_DECL)
20675 decl = value;
20676
20677 if (decl != NULL_TREE)
20678 {
20679 dw_die_ref decl_die = lookup_decl_die (decl);
20680
20681 /* ??? Can this happen, or should the variable have been bound
20682 first? Probably it can, since I imagine that we try to create
20683 the types of parameters in the order in which they exist in
20684 the list, and won't have created a forward reference to a
20685 later parameter. */
20686 if (decl_die != NULL)
20687 {
20688 add_AT_die_ref (die, attr, decl_die);
20689 return;
20690 }
20691 }
20692 }
20693
20694 /* Last chance: try to create a stack operation procedure to evaluate the
20695 value. Do nothing if even that is not possible or permitted. */
20696 if ((forms & dw_scalar_form_exprloc) == 0)
20697 return;
20698
20699 list = loc_list_from_tree (value, 2, context);
20700 if (context && context->placeholder_arg)
20701 {
20702 placeholder_seen = context->placeholder_seen;
20703 context->placeholder_seen = false;
20704 }
20705 if (list == NULL || single_element_loc_list_p (list))
20706 {
20707 /* If this attribute is neither a reference nor a constant, it is
20708 a DWARF expression rather than a location description. For that
20709 loc_list_from_tree (value, 0, &context) is needed. */
20710 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20711 if (list2 && single_element_loc_list_p (list2))
20712 {
20713 if (placeholder_seen)
20714 {
20715 struct dwarf_procedure_info dpi;
20716 dpi.fndecl = NULL_TREE;
20717 dpi.args_count = 1;
20718 if (!resolve_args_picking (list2->expr, 1, &dpi))
20719 return;
20720 }
20721 add_AT_loc (die, attr, list2->expr);
20722 return;
20723 }
20724 }
20725
20726 /* If that failed to give a single element location list, fall back to
20727 outputting this as a reference... still if permitted. */
20728 if (list == NULL
20729 || (forms & dw_scalar_form_reference) == 0
20730 || placeholder_seen)
20731 return;
20732
20733 if (current_function_decl == 0)
20734 context_die = comp_unit_die ();
20735 else
20736 context_die = lookup_decl_die (current_function_decl);
20737
20738 decl_die = new_die (DW_TAG_variable, context_die, value);
20739 add_AT_flag (decl_die, DW_AT_artificial, 1);
20740 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20741 context_die);
20742 add_AT_location_description (decl_die, DW_AT_location, list);
20743 add_AT_die_ref (die, attr, decl_die);
20744 }
20745
20746 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
20747 default. */
20748
20749 static int
20750 lower_bound_default (void)
20751 {
20752 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20753 {
20754 case DW_LANG_C:
20755 case DW_LANG_C89:
20756 case DW_LANG_C99:
20757 case DW_LANG_C11:
20758 case DW_LANG_C_plus_plus:
20759 case DW_LANG_C_plus_plus_11:
20760 case DW_LANG_C_plus_plus_14:
20761 case DW_LANG_ObjC:
20762 case DW_LANG_ObjC_plus_plus:
20763 return 0;
20764 case DW_LANG_Fortran77:
20765 case DW_LANG_Fortran90:
20766 case DW_LANG_Fortran95:
20767 case DW_LANG_Fortran03:
20768 case DW_LANG_Fortran08:
20769 return 1;
20770 case DW_LANG_UPC:
20771 case DW_LANG_D:
20772 case DW_LANG_Python:
20773 return dwarf_version >= 4 ? 0 : -1;
20774 case DW_LANG_Ada95:
20775 case DW_LANG_Ada83:
20776 case DW_LANG_Cobol74:
20777 case DW_LANG_Cobol85:
20778 case DW_LANG_Modula2:
20779 case DW_LANG_PLI:
20780 return dwarf_version >= 4 ? 1 : -1;
20781 default:
20782 return -1;
20783 }
20784 }
20785
20786 /* Given a tree node describing an array bound (either lower or upper) output
20787 a representation for that bound. */
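/* (For instance, for the C array type "int a[10]" the lower bound 0 is the
   language default and can be omitted, so only DW_AT_upper_bound 9 is
   emitted for the DW_TAG_subrange_type.) */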
20788
20789 static void
20790 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20791 tree bound, struct loc_descr_context *context)
20792 {
20793 int dflt;
20794
20795 while (1)
20796 switch (TREE_CODE (bound))
20797 {
20798 /* Strip all conversions. */
20799 CASE_CONVERT:
20800 case VIEW_CONVERT_EXPR:
20801 bound = TREE_OPERAND (bound, 0);
20802 break;
20803
20804 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20805 are even omitted when they are the default. */
20806 case INTEGER_CST:
20807 /* If the value for this bound is the default one, we can even omit the
20808 attribute. */
20809 if (bound_attr == DW_AT_lower_bound
20810 && tree_fits_shwi_p (bound)
20811 && (dflt = lower_bound_default ()) != -1
20812 && tree_to_shwi (bound) == dflt)
20813 return;
20814
20815 /* FALLTHRU */
20816
20817 default:
20818 /* Because of the complex interactions there can be with other GNAT
20819 encodings, GDB isn't ready yet to handle a proper DWARF description
20820 for self-referential subrange bounds: let GNAT encodings do the
20821 magic in such a case. */
20822 if (is_ada ()
20823 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20824 && contains_placeholder_p (bound))
20825 return;
20826
20827 add_scalar_info (subrange_die, bound_attr, bound,
20828 dw_scalar_form_constant
20829 | dw_scalar_form_exprloc
20830 | dw_scalar_form_reference,
20831 context);
20832 return;
20833 }
20834 }
20835
20836 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20837 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20838 Note that the block of subscript information for an array type also
20839 includes information about the element type of the given array type.
20840
20841 This function reuses previously set type and bound information if
20842 available. */
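/* (For instance, with COLLAPSE_P true the C array type "int m[3][4]" is
   described by a single DW_TAG_array_type whose two DW_TAG_subrange_type
   children cover the ranges 0 .. 2 and 0 .. 3.) */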
20843
20844 static void
20845 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20846 {
20847 unsigned dimension_number;
20848 tree lower, upper;
20849 dw_die_ref child = type_die->die_child;
20850
20851 for (dimension_number = 0;
20852 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20853 type = TREE_TYPE (type), dimension_number++)
20854 {
20855 tree domain = TYPE_DOMAIN (type);
20856
20857 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20858 break;
20859
20860 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20861 and (in GNU C only) variable bounds. Handle all three forms
20862 here. */
20863
20864 /* Find and reuse a previously generated DW_TAG_subrange_type if
20865 available.
20866
20867 For multi-dimensional arrays, as we iterate through the
20868 various dimensions in the enclosing for loop above, we also
20869 iterate through the DIE children and pick up each
20870 DW_TAG_subrange_type previously generated (if available).
20871 Each child DW_TAG_subrange_type DIE describes the range of
20872 the current dimension. At this point we should have as many
20873 DW_TAG_subrange_type's as we have dimensions in the
20874 array. */
20875 dw_die_ref subrange_die = NULL;
20876 if (child)
20877 while (1)
20878 {
20879 child = child->die_sib;
20880 if (child->die_tag == DW_TAG_subrange_type)
20881 subrange_die = child;
20882 if (child == type_die->die_child)
20883 {
20884 /* If we wrapped around, stop looking next time. */
20885 child = NULL;
20886 break;
20887 }
20888 if (child->die_tag == DW_TAG_subrange_type)
20889 break;
20890 }
20891 if (!subrange_die)
20892 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20893
20894 if (domain)
20895 {
20896 /* We have an array type with specified bounds. */
20897 lower = TYPE_MIN_VALUE (domain);
20898 upper = TYPE_MAX_VALUE (domain);
20899
20900 /* Define the index type. */
20901 if (TREE_TYPE (domain)
20902 && !get_AT (subrange_die, DW_AT_type))
20903 {
20904 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20905 TREE_TYPE field. We can't emit debug info for this
20906 because it is an unnamed integral type. */
20907 if (TREE_CODE (domain) == INTEGER_TYPE
20908 && TYPE_NAME (domain) == NULL_TREE
20909 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20910 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20911 ;
20912 else
20913 add_type_attribute (subrange_die, TREE_TYPE (domain),
20914 TYPE_UNQUALIFIED, false, type_die);
20915 }
20916
20917 /* ??? If upper is NULL, the array has unspecified length,
20918 but it does have a lower bound. This happens with Fortran
20919 dimension arr(N:*)
20920 Since the debugger is definitely going to need to know N
20921 to produce useful results, go ahead and output the lower
20922 bound solo, and hope the debugger can cope. */
20923
20924 if (!get_AT (subrange_die, DW_AT_lower_bound))
20925 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20926 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20927 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20928 }
20929
20930 /* Otherwise we have an array type with an unspecified length. The
20931 DWARF-2 spec does not say how to handle this; let's just leave out the
20932 bounds. */
20933 }
20934 }
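
/* Illustrative example (editorial annotation, not part of the original
   source): with COLLAPSE_P true, a C declaration such as

       int m[3][5];

   is expected to yield a single DW_TAG_array_type DIE with two
   DW_TAG_subrange_type children, one per dimension, carrying
   DW_AT_upper_bound 2 and 4 respectively (the lower bound of 0 is the C
   default and is omitted).  With COLLAPSE_P false, as for Ada, each
   dimension keeps its own array type DIE instead.  */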
20935
20936 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20937
20938 static void
20939 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20940 {
20941 dw_die_ref decl_die;
20942 HOST_WIDE_INT size;
20943 dw_loc_descr_ref size_expr = NULL;
20944
20945 switch (TREE_CODE (tree_node))
20946 {
20947 case ERROR_MARK:
20948 size = 0;
20949 break;
20950 case ENUMERAL_TYPE:
20951 case RECORD_TYPE:
20952 case UNION_TYPE:
20953 case QUAL_UNION_TYPE:
20954 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20955 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20956 {
20957 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20958 return;
20959 }
20960 size_expr = type_byte_size (tree_node, &size);
20961 break;
20962 case FIELD_DECL:
20963 /* For a data member of a struct or union, the DW_AT_byte_size is
20964 generally given as the number of bytes normally allocated for an
20965 object of the *declared* type of the member itself. This is true
20966 even for bit-fields. */
20967 size = int_size_in_bytes (field_type (tree_node));
20968 break;
20969 default:
20970 gcc_unreachable ();
20971 }
20972
20973 /* Support for dynamically-sized objects was introduced by DWARFv3.
20974 At the moment, GDB does not handle variable byte sizes very well,
20975 though. */
20976 if ((dwarf_version >= 3 || !dwarf_strict)
20977 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20978 && size_expr != NULL)
20979 add_AT_loc (die, DW_AT_byte_size, size_expr);
20980
20981 /* Note that `size' might be -1 when we get to this point. If it is, that
20982 indicates that the byte size of the entity in question is variable and
20983 that we could not generate a DWARF expression that computes it. */
20984 if (size >= 0)
20985 add_AT_unsigned (die, DW_AT_byte_size, size);
20986 }
20987
20988 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20989 alignment. */
20990
20991 static void
20992 add_alignment_attribute (dw_die_ref die, tree tree_node)
20993 {
20994 if (dwarf_version < 5 && dwarf_strict)
20995 return;
20996
20997 unsigned align;
20998
20999 if (DECL_P (tree_node))
21000 {
21001 if (!DECL_USER_ALIGN (tree_node))
21002 return;
21003
21004 align = DECL_ALIGN_UNIT (tree_node);
21005 }
21006 else if (TYPE_P (tree_node))
21007 {
21008 if (!TYPE_USER_ALIGN (tree_node))
21009 return;
21010
21011 align = TYPE_ALIGN_UNIT (tree_node);
21012 }
21013 else
21014 gcc_unreachable ();
21015
21016 add_AT_unsigned (die, DW_AT_alignment, align);
21017 }
21018
21019 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21020 which specifies the distance in bits from the highest order bit of the
21021 "containing object" for the bit-field to the highest order bit of the
21022 bit-field itself.
21023
21024 For any given bit-field, the "containing object" is a hypothetical object
21025 (of some integral or enum type) within which the given bit-field lives. The
21026 type of this hypothetical "containing object" is always the same as the
21027 declared type of the individual bit-field itself. The determination of the
21028 exact location of the "containing object" for a bit-field is rather
21029 complicated. It's handled by the `field_byte_offset' function (above).
21030
21031 CTX is required: see the comment for VLR_CONTEXT.
21032
21033 Note that it is the size (in bytes) of the hypothetical "containing object"
21034 which will be given in the DW_AT_byte_size attribute for this bit-field.
21035 (See `add_byte_size_attribute' above).  */
21036
21037 static inline void
21038 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21039 {
21040 HOST_WIDE_INT object_offset_in_bytes;
21041 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21042 HOST_WIDE_INT bitpos_int;
21043 HOST_WIDE_INT highest_order_object_bit_offset;
21044 HOST_WIDE_INT highest_order_field_bit_offset;
21045 HOST_WIDE_INT bit_offset;
21046
21047 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21048
21049 /* Must be a field and a bit field. */
21050 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21051
21052 /* We can't yet handle bit-fields whose offsets are variable, so if we
21053 encounter such things, just return without generating any attribute
21054 whatsoever.  Likewise for a variable or overly large size.  */
21055 if (! tree_fits_shwi_p (bit_position (decl))
21056 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21057 return;
21058
21059 bitpos_int = int_bit_position (decl);
21060
21061 /* Note that the bit offset is always the distance (in bits) from the
21062 highest-order bit of the "containing object" to the highest-order bit of
21063 the bit-field itself. Since the "high-order end" of any object or field
21064 is different on big-endian and little-endian machines, the computation
21065 below must take account of these differences. */
21066 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21067 highest_order_field_bit_offset = bitpos_int;
21068
21069 if (! BYTES_BIG_ENDIAN)
21070 {
21071 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21072 highest_order_object_bit_offset +=
21073 simple_type_size_in_bits (original_type);
21074 }
21075
21076 bit_offset
21077 = (! BYTES_BIG_ENDIAN
21078 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21079 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21080
21081 if (bit_offset < 0)
21082 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21083 else
21084 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21085 }
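
/* Worked example (editorial annotation, not part of the original source):
   for

       struct s { int a : 3; int b : 5; };

   on a little-endian target with 32-bit int, the containing object of `b'
   is expected to start at byte 0, so the computation above gives

       highest_order_object_bit_offset = 0 * BITS_PER_UNIT + 32 = 32
       highest_order_field_bit_offset  = 3 (bit position) + 5 (size) = 8
       bit_offset                      = 32 - 8 = 24

   and DW_AT_bit_offset is expected to be 24.  On a big-endian target the
   same field gets bit_offset = 3 - 0 = 3.  */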
21086
21087 /* For a FIELD_DECL node which represents a bit field, output an attribute
21088 which specifies the length in bits of the given field. */
21089
21090 static inline void
21091 add_bit_size_attribute (dw_die_ref die, tree decl)
21092 {
21093 /* Must be a field and a bit field. */
21094 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21095 && DECL_BIT_FIELD_TYPE (decl));
21096
21097 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21098 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21099 }
21100
21101 /* If the compiled language is ANSI C, add a DW_AT_prototyped attribute
21102 when argument types are given for the parameters of a function.  */
21103
21104 static inline void
21105 add_prototyped_attribute (dw_die_ref die, tree func_type)
21106 {
21107 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21108 {
21109 case DW_LANG_C:
21110 case DW_LANG_C89:
21111 case DW_LANG_C99:
21112 case DW_LANG_C11:
21113 case DW_LANG_ObjC:
21114 if (prototype_p (func_type))
21115 add_AT_flag (die, DW_AT_prototyped, 1);
21116 break;
21117 default:
21118 break;
21119 }
21120 }
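
/* Illustrative example (editorial annotation, not part of the original
   source): for the C declarations

       int f (void);     <- prototype_p is true, DW_AT_prototyped is added
       int g ();         <- old-style declaration, no attribute is added

   Languages not listed in the switch above (C++, for instance, where every
   function is prototyped) never get the attribute from this function.  */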
21121
21122 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21123 by looking in the type declaration, the object declaration equate table or
21124 the block mapping. */
21125
21126 static inline dw_die_ref
21127 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21128 {
21129 dw_die_ref origin_die = NULL;
21130
21131 if (DECL_P (origin))
21132 {
21133 dw_die_ref c;
21134 origin_die = lookup_decl_die (origin);
21135 /* "Unwrap" the decls DIE which we put in the imported unit context.
21136 We are looking for the abstract copy here. */
21137 if (in_lto_p
21138 && origin_die
21139 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21140 /* ??? Identify this better. */
21141 && c->with_offset)
21142 origin_die = c;
21143 }
21144 else if (TYPE_P (origin))
21145 origin_die = lookup_type_die (origin);
21146 else if (TREE_CODE (origin) == BLOCK)
21147 origin_die = BLOCK_DIE (origin);
21148
21149 /* XXX: Functions that are never lowered don't always have correct block
21150 trees (in the case of Java, they simply have no block tree; likewise in some
21151 other languages).  For these functions, there is nothing we can really do to
21152 output correct debug info for inlined functions in all cases. Rather
21153 than die, we'll just produce deficient debug info now, in that we will
21154 have variables without a proper abstract origin. In the future, when all
21155 functions are lowered, we should re-add a gcc_assert (origin_die)
21156 here. */
21157
21158 if (origin_die)
21159 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21160 return origin_die;
21161 }
21162
21163 /* We do not currently support the pure_virtual attribute. */
21164
21165 static inline void
21166 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21167 {
21168 if (DECL_VINDEX (func_decl))
21169 {
21170 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21171
21172 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21173 add_AT_loc (die, DW_AT_vtable_elem_location,
21174 new_loc_descr (DW_OP_constu,
21175 tree_to_shwi (DECL_VINDEX (func_decl)),
21176 0));
21177
21178 /* GNU extension: Record what type this method came from originally. */
21179 if (debug_info_level > DINFO_LEVEL_TERSE
21180 && DECL_CONTEXT (func_decl))
21181 add_AT_die_ref (die, DW_AT_containing_type,
21182 lookup_type_die (DECL_CONTEXT (func_decl)));
21183 }
21184 }
21185 \f
21186 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21187 given decl.  This was a vendor extension until DWARF 4
21188 standardized it.  */
21189
21190 static void
21191 add_linkage_attr (dw_die_ref die, tree decl)
21192 {
21193 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21194
21195 /* Mimic what assemble_name_raw does with a leading '*'. */
21196 if (name[0] == '*')
21197 name = &name[1];
21198
21199 if (dwarf_version >= 4)
21200 add_AT_string (die, DW_AT_linkage_name, name);
21201 else
21202 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21203 }
21204
21205 /* Add source coordinate attributes for the given decl. */
21206
21207 static void
21208 add_src_coords_attributes (dw_die_ref die, tree decl)
21209 {
21210 expanded_location s;
21211
21212 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21213 return;
21214 s = expand_location (DECL_SOURCE_LOCATION (decl));
21215 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21216 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21217 if (debug_column_info && s.column)
21218 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21219 }
21220
21221 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21222
21223 static void
21224 add_linkage_name_raw (dw_die_ref die, tree decl)
21225 {
21226 /* Defer until we have an assembler name set. */
21227 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21228 {
21229 limbo_die_node *asm_name;
21230
21231 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21232 asm_name->die = die;
21233 asm_name->created_for = decl;
21234 asm_name->next = deferred_asm_name;
21235 deferred_asm_name = asm_name;
21236 }
21237 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21238 add_linkage_attr (die, decl);
21239 }
21240
21241 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21242
21243 static void
21244 add_linkage_name (dw_die_ref die, tree decl)
21245 {
21246 if (debug_info_level > DINFO_LEVEL_NONE
21247 && VAR_OR_FUNCTION_DECL_P (decl)
21248 && TREE_PUBLIC (decl)
21249 && !(VAR_P (decl) && DECL_REGISTER (decl))
21250 && die->die_tag != DW_TAG_member)
21251 add_linkage_name_raw (die, decl);
21252 }
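
/* Illustrative example (editorial annotation; the mangled name shown is an
   assumption based on the Itanium C++ ABI, not output taken from this
   source): for a C++ declaration such as

       namespace N { int counter; }

   DECL_NAME is "counter" while DECL_ASSEMBLER_NAME is "_ZN1N7counterE", so
   the two differ and add_linkage_name_raw attaches the mangled string as
   DW_AT_linkage_name (or DW_AT_MIPS_linkage_name before DWARF 4).  */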
21253
21254 /* Add a DW_AT_name attribute and source coordinate attribute for the
21255 given decl, but only if it actually has a name. */
21256
21257 static void
21258 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21259 bool no_linkage_name)
21260 {
21261 tree decl_name;
21262
21263 decl_name = DECL_NAME (decl);
21264 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21265 {
21266 const char *name = dwarf2_name (decl, 0);
21267 if (name)
21268 add_name_attribute (die, name);
21269 if (! DECL_ARTIFICIAL (decl))
21270 add_src_coords_attributes (die, decl);
21271
21272 if (!no_linkage_name)
21273 add_linkage_name (die, decl);
21274 }
21275
21276 #ifdef VMS_DEBUGGING_INFO
21277 /* Get the function's name, as described by its RTL. This may be different
21278 from the DECL_NAME name used in the source file. */
21279 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21280 {
21281 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21282 XEXP (DECL_RTL (decl), 0), false);
21283 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21284 }
21285 #endif /* VMS_DEBUGGING_INFO */
21286 }
21287
21288 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21289
21290 static void
21291 add_discr_value (dw_die_ref die, dw_discr_value *value)
21292 {
21293 dw_attr_node attr;
21294
21295 attr.dw_attr = DW_AT_discr_value;
21296 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21297 attr.dw_attr_val.val_entry = NULL;
21298 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21299 if (value->pos)
21300 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21301 else
21302 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21303 add_dwarf_attr (die, &attr);
21304 }
21305
21306 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21307
21308 static void
21309 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21310 {
21311 dw_attr_node attr;
21312
21313 attr.dw_attr = DW_AT_discr_list;
21314 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21315 attr.dw_attr_val.val_entry = NULL;
21316 attr.dw_attr_val.v.val_discr_list = discr_list;
21317 add_dwarf_attr (die, &attr);
21318 }
21319
21320 static inline dw_discr_list_ref
21321 AT_discr_list (dw_attr_node *attr)
21322 {
21323 return attr->dw_attr_val.v.val_discr_list;
21324 }
21325
21326 #ifdef VMS_DEBUGGING_INFO
21327 /* Output the debug main pointer die for VMS */
21328
21329 void
21330 dwarf2out_vms_debug_main_pointer (void)
21331 {
21332 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21333 dw_die_ref die;
21334
21335 /* Allocate the VMS debug main subprogram die. */
21336 die = new_die_raw (DW_TAG_subprogram);
21337 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21338 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21339 current_function_funcdef_no);
21340 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21341
21342 /* Make it the first child of comp_unit_die (). */
21343 die->die_parent = comp_unit_die ();
21344 if (comp_unit_die ()->die_child)
21345 {
21346 die->die_sib = comp_unit_die ()->die_child->die_sib;
21347 comp_unit_die ()->die_child->die_sib = die;
21348 }
21349 else
21350 {
21351 die->die_sib = die;
21352 comp_unit_die ()->die_child = die;
21353 }
21354 }
21355 #endif /* VMS_DEBUGGING_INFO */
21356
21357 /* walk_tree helper function for uses_local_type, below. */
21358
21359 static tree
21360 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21361 {
21362 if (!TYPE_P (*tp))
21363 *walk_subtrees = 0;
21364 else
21365 {
21366 tree name = TYPE_NAME (*tp);
21367 if (name && DECL_P (name) && decl_function_context (name))
21368 return *tp;
21369 }
21370 return NULL_TREE;
21371 }
21372
21373 /* If TYPE involves a function-local type (including a local typedef to a
21374 non-local type), returns that type; otherwise returns NULL_TREE. */
21375
21376 static tree
21377 uses_local_type (tree type)
21378 {
21379 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21380 return used;
21381 }
21382
21383 /* Return the DIE for the scope that immediately contains this type.
21384 Non-named types that do not involve a function-local type get global
21385 scope. Named types nested in namespaces or other types get their
21386 containing scope. All other types (i.e. function-local named types) get
21387 the current active scope. */
21388
21389 static dw_die_ref
21390 scope_die_for (tree t, dw_die_ref context_die)
21391 {
21392 dw_die_ref scope_die = NULL;
21393 tree containing_scope;
21394
21395 /* Non-types always go in the current scope. */
21396 gcc_assert (TYPE_P (t));
21397
21398 /* Use the scope of the typedef, rather than the scope of the type
21399 it refers to. */
21400 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21401 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21402 else
21403 containing_scope = TYPE_CONTEXT (t);
21404
21405 /* Use the containing namespace if there is one. */
21406 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21407 {
21408 if (context_die == lookup_decl_die (containing_scope))
21409 /* OK */;
21410 else if (debug_info_level > DINFO_LEVEL_TERSE)
21411 context_die = get_context_die (containing_scope);
21412 else
21413 containing_scope = NULL_TREE;
21414 }
21415
21416 /* Ignore function type "scopes" from the C frontend. They mean that
21417 a tagged type is local to a parmlist of a function declarator, but
21418 that isn't useful to DWARF. */
21419 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21420 containing_scope = NULL_TREE;
21421
21422 if (SCOPE_FILE_SCOPE_P (containing_scope))
21423 {
21424 /* If T uses a local type keep it local as well, to avoid references
21425 to function-local DIEs from outside the function. */
21426 if (current_function_decl && uses_local_type (t))
21427 scope_die = context_die;
21428 else
21429 scope_die = comp_unit_die ();
21430 }
21431 else if (TYPE_P (containing_scope))
21432 {
21433 /* For types, we can just look up the appropriate DIE. */
21434 if (debug_info_level > DINFO_LEVEL_TERSE)
21435 scope_die = get_context_die (containing_scope);
21436 else
21437 {
21438 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21439 if (scope_die == NULL)
21440 scope_die = comp_unit_die ();
21441 }
21442 }
21443 else
21444 scope_die = context_die;
21445
21446 return scope_die;
21447 }
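
/* Illustrative examples (editorial annotation, not part of the original
   source) of the scope selection above, assuming a debug info level above
   DINFO_LEVEL_TERSE:

       namespace N { struct S; }          S's DIE goes under the DIE for N
       struct Outer { struct Inner; };    Inner goes under Outer's DIE
       void f () { struct Local { }; }    Local stays in the current
                                          (function) context DIE

   A file-scope type that does not use a function-local type falls back to
   comp_unit_die ().  */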
21448
21449 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21450
21451 static inline int
21452 local_scope_p (dw_die_ref context_die)
21453 {
21454 for (; context_die; context_die = context_die->die_parent)
21455 if (context_die->die_tag == DW_TAG_inlined_subroutine
21456 || context_die->die_tag == DW_TAG_subprogram)
21457 return 1;
21458
21459 return 0;
21460 }
21461
21462 /* Returns nonzero if CONTEXT_DIE is a class. */
21463
21464 static inline int
21465 class_scope_p (dw_die_ref context_die)
21466 {
21467 return (context_die
21468 && (context_die->die_tag == DW_TAG_structure_type
21469 || context_die->die_tag == DW_TAG_class_type
21470 || context_die->die_tag == DW_TAG_interface_type
21471 || context_die->die_tag == DW_TAG_union_type));
21472 }
21473
21474 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21475 whether or not to treat a DIE in this context as a declaration. */
21476
21477 static inline int
21478 class_or_namespace_scope_p (dw_die_ref context_die)
21479 {
21480 return (class_scope_p (context_die)
21481 || (context_die && context_die->die_tag == DW_TAG_namespace));
21482 }
21483
21484 /* Many forms of DIEs require a "type description" attribute. This
21485 routine locates the proper "type descriptor" die for the type given
21486 by 'type' plus any additional qualifiers given by 'cv_quals', and
21487 adds a DW_AT_type attribute below the given die. */
21488
21489 static void
21490 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21491 bool reverse, dw_die_ref context_die)
21492 {
21493 enum tree_code code = TREE_CODE (type);
21494 dw_die_ref type_die = NULL;
21495
21496 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21497 or fixed-point type, use the inner type. This is because we have no
21498 support for unnamed types in base_type_die. This can happen if this is
21499 an Ada subrange type.  The correct solution is to emit a subrange type die.  */
21500 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21501 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21502 type = TREE_TYPE (type), code = TREE_CODE (type);
21503
21504 if (code == ERROR_MARK
21505 /* Handle a special case. For functions whose return type is void, we
21506 generate *no* type attribute. (Note that no object may have type
21507 `void', so this only applies to function return types). */
21508 || code == VOID_TYPE)
21509 return;
21510
21511 type_die = modified_type_die (type,
21512 cv_quals | TYPE_QUALS (type),
21513 reverse,
21514 context_die);
21515
21516 if (type_die != NULL)
21517 add_AT_die_ref (object_die, DW_AT_type, type_die);
21518 }
21519
21520 /* Given an object die, add the calling convention attribute for the
21521 function call type. */
21522 static void
21523 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21524 {
21525 enum dwarf_calling_convention value = DW_CC_normal;
21526
21527 value = ((enum dwarf_calling_convention)
21528 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21529
21530 if (is_fortran ()
21531 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21532 {
21533 /* DWARF 2 doesn't provide a way to identify a program's source-level
21534 entry point. DW_AT_calling_convention attributes are only meant
21535 to describe functions' calling conventions. However, lacking a
21536 better way to signal the Fortran main program, we used this for
21537 a long time, following existing custom. Now, DWARF 4 has
21538 DW_AT_main_subprogram, which we add below, but some tools still
21539 rely on the old way, which we thus keep. */
21540 value = DW_CC_program;
21541
21542 if (dwarf_version >= 4 || !dwarf_strict)
21543 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21544 }
21545
21546 /* Only add the attribute if the backend requests it, and
21547 the value is not DW_CC_normal.  */
21548 if (value && (value != DW_CC_normal))
21549 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21550 }
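
/* Illustrative note (editorial annotation, not part of the original source):
   for a Fortran main program such as

       program hello
       end program hello

   gfortran emits the body under the assembler name MAIN__, so the check
   above tags its DIE with DW_CC_program and, when DWARF 4 or non-strict
   output is allowed, with DW_AT_main_subprogram as well.  */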
21551
21552 /* Given a tree pointer to a struct, class, union, or enum type node, return
21553 a pointer to the (string) tag name for the given type, or zero if the type
21554 was declared without a tag. */
21555
21556 static const char *
21557 type_tag (const_tree type)
21558 {
21559 const char *name = 0;
21560
21561 if (TYPE_NAME (type) != 0)
21562 {
21563 tree t = 0;
21564
21565 /* Find the IDENTIFIER_NODE for the type name. */
21566 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21567 && !TYPE_NAMELESS (type))
21568 t = TYPE_NAME (type);
21569
21570 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21571 a TYPE_DECL node, regardless of whether or not a `typedef' was
21572 involved. */
21573 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21574 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21575 {
21576 /* We want to be extra verbose. Don't call dwarf_name if
21577 DECL_NAME isn't set. The default hook for decl_printable_name
21578 doesn't like that, and in this context it's correct to return
21579 0, instead of "<anonymous>" or the like. */
21580 if (DECL_NAME (TYPE_NAME (type))
21581 && !DECL_NAMELESS (TYPE_NAME (type)))
21582 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21583 }
21584
21585 /* Now get the name as a string, or invent one. */
21586 if (!name && t != 0)
21587 name = IDENTIFIER_POINTER (t);
21588 }
21589
21590 return (name == 0 || *name == '\0') ? 0 : name;
21591 }
21592
21593 /* Return the type associated with a data member, making a special check
21594 for bit-field types.  */
21595
21596 static inline tree
21597 member_declared_type (const_tree member)
21598 {
21599 return (DECL_BIT_FIELD_TYPE (member)
21600 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21601 }
21602
21603 /* Get the decl's label, as described by its RTL. This may be different
21604 from the DECL_NAME name used in the source file. */
21605
21606 #if 0
21607 static const char *
21608 decl_start_label (tree decl)
21609 {
21610 rtx x;
21611 const char *fnname;
21612
21613 x = DECL_RTL (decl);
21614 gcc_assert (MEM_P (x));
21615
21616 x = XEXP (x, 0);
21617 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21618
21619 fnname = XSTR (x, 0);
21620 return fnname;
21621 }
21622 #endif
21623 \f
21624 /* For variable-length arrays that have been previously generated, but
21625 may be incomplete due to missing subscript info, fill the subscript
21626 info. Return TRUE if this is one of those cases. */
21627 static bool
21628 fill_variable_array_bounds (tree type)
21629 {
21630 if (TREE_ASM_WRITTEN (type)
21631 && TREE_CODE (type) == ARRAY_TYPE
21632 && variably_modified_type_p (type, NULL))
21633 {
21634 dw_die_ref array_die = lookup_type_die (type);
21635 if (!array_die)
21636 return false;
21637 add_subscript_info (array_die, type, !is_ada ());
21638 return true;
21639 }
21640 return false;
21641 }
21642
21643 /* These routines generate the internal representation of the DIE's for
21644 the compilation unit. Debugging information is collected by walking
21645 the declaration trees passed in from dwarf2out_decl(). */
21646
21647 static void
21648 gen_array_type_die (tree type, dw_die_ref context_die)
21649 {
21650 dw_die_ref array_die;
21651
21652 /* GNU compilers represent multidimensional array types as sequences of
21653 one-dimensional array types whose element types are themselves array types.
21654 We sometimes squish that down to a single array_type DIE with multiple
21655 subscripts in the Dwarf debugging info.  The draft Dwarf specification
21656 says that we are allowed to do this kind of compression in C, because
21657 there is no difference between an array of arrays and a multidimensional
21658 array.  We don't do this for Ada, so as to remain as close as possible to
21659 the actual representation, which is especially important given the
21660 language's flexibility wrt arrays of variable size.  */
21661
21662 bool collapse_nested_arrays = !is_ada ();
21663
21664 if (fill_variable_array_bounds (type))
21665 return;
21666
21667 dw_die_ref scope_die = scope_die_for (type, context_die);
21668 tree element_type;
21669
21670 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21671 DW_TAG_string_type doesn't have a DW_AT_type attribute).  */
21672 if (TYPE_STRING_FLAG (type)
21673 && TREE_CODE (type) == ARRAY_TYPE
21674 && is_fortran ()
21675 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21676 {
21677 HOST_WIDE_INT size;
21678
21679 array_die = new_die (DW_TAG_string_type, scope_die, type);
21680 add_name_attribute (array_die, type_tag (type));
21681 equate_type_number_to_die (type, array_die);
21682 size = int_size_in_bytes (type);
21683 if (size >= 0)
21684 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21685 /* ??? We can't annotate types late, but for LTO we may not
21686 generate a location early either (gfortran.dg/save_6.f90). */
21687 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21688 && TYPE_DOMAIN (type) != NULL_TREE
21689 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21690 {
21691 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21692 tree rszdecl = szdecl;
21693
21694 size = int_size_in_bytes (TREE_TYPE (szdecl));
21695 if (!DECL_P (szdecl))
21696 {
21697 if (TREE_CODE (szdecl) == INDIRECT_REF
21698 && DECL_P (TREE_OPERAND (szdecl, 0)))
21699 {
21700 rszdecl = TREE_OPERAND (szdecl, 0);
21701 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21702 != DWARF2_ADDR_SIZE)
21703 size = 0;
21704 }
21705 else
21706 size = 0;
21707 }
21708 if (size > 0)
21709 {
21710 dw_loc_list_ref loc
21711 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21712 NULL);
21713 if (loc)
21714 {
21715 add_AT_location_description (array_die, DW_AT_string_length,
21716 loc);
21717 if (size != DWARF2_ADDR_SIZE)
21718 add_AT_unsigned (array_die, dwarf_version >= 5
21719 ? DW_AT_string_length_byte_size
21720 : DW_AT_byte_size, size);
21721 }
21722 }
21723 }
21724 return;
21725 }
21726
21727 array_die = new_die (DW_TAG_array_type, scope_die, type);
21728 add_name_attribute (array_die, type_tag (type));
21729 equate_type_number_to_die (type, array_die);
21730
21731 if (TREE_CODE (type) == VECTOR_TYPE)
21732 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21733
21734 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21735 if (is_fortran ()
21736 && TREE_CODE (type) == ARRAY_TYPE
21737 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21738 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21739 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21740
21741 #if 0
21742 /* We default the array ordering. Debuggers will probably do the right
21743 things even if DW_AT_ordering is not present. It's not even an issue
21744 until we start to get into multidimensional arrays anyway. If a debugger
21745 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21746 then we'll have to put the DW_AT_ordering attribute back in. (But if
21747 and when we find out that we need to put these in, we will only do so
21748 for multidimensional arrays.)  */
21749 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21750 #endif
21751
21752 if (TREE_CODE (type) == VECTOR_TYPE)
21753 {
21754 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21755 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21756 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21757 add_bound_info (subrange_die, DW_AT_upper_bound,
21758 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21759 }
21760 else
21761 add_subscript_info (array_die, type, collapse_nested_arrays);
21762
21763 /* Add representation of the type of the elements of this array type and
21764 emit the corresponding DIE if we haven't done it already. */
21765 element_type = TREE_TYPE (type);
21766 if (collapse_nested_arrays)
21767 while (TREE_CODE (element_type) == ARRAY_TYPE)
21768 {
21769 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21770 break;
21771 element_type = TREE_TYPE (element_type);
21772 }
21773
21774 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21775 TREE_CODE (type) == ARRAY_TYPE
21776 && TYPE_REVERSE_STORAGE_ORDER (type),
21777 context_die);
21778
21779 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21780 if (TYPE_ARTIFICIAL (type))
21781 add_AT_flag (array_die, DW_AT_artificial, 1);
21782
21783 if (get_AT (array_die, DW_AT_name))
21784 add_pubtype (type, array_die);
21785
21786 add_alignment_attribute (array_die, type);
21787 }
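
/* Illustrative example (editorial annotation, not part of the original
   source): a GNU vector type such as

       typedef int v4si __attribute__ ((vector_size (16)));

   takes the VECTOR_TYPE path above and is expected to produce a
   DW_TAG_array_type DIE carrying DW_AT_GNU_vector and a single
   DW_TAG_subrange_type child with bounds 0 .. 3 (TYPE_VECTOR_SUBPARTS - 1),
   with `int' as the element type.  */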
21788
21789 /* This routine generates a DIE for an array with a hidden descriptor;
21790 details are filled into *info by a langhook.  */
21791
21792 static void
21793 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21794 dw_die_ref context_die)
21795 {
21796 const dw_die_ref scope_die = scope_die_for (type, context_die);
21797 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21798 struct loc_descr_context context = { type, info->base_decl, NULL,
21799 false, false };
21800 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21801 int dim;
21802
21803 add_name_attribute (array_die, type_tag (type));
21804 equate_type_number_to_die (type, array_die);
21805
21806 if (info->ndimensions > 1)
21807 switch (info->ordering)
21808 {
21809 case array_descr_ordering_row_major:
21810 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21811 break;
21812 case array_descr_ordering_column_major:
21813 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21814 break;
21815 default:
21816 break;
21817 }
21818
21819 if (dwarf_version >= 3 || !dwarf_strict)
21820 {
21821 if (info->data_location)
21822 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21823 dw_scalar_form_exprloc, &context);
21824 if (info->associated)
21825 add_scalar_info (array_die, DW_AT_associated, info->associated,
21826 dw_scalar_form_constant
21827 | dw_scalar_form_exprloc
21828 | dw_scalar_form_reference, &context);
21829 if (info->allocated)
21830 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21831 dw_scalar_form_constant
21832 | dw_scalar_form_exprloc
21833 | dw_scalar_form_reference, &context);
21834 if (info->stride)
21835 {
21836 const enum dwarf_attribute attr
21837 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21838 const int forms
21839 = (info->stride_in_bits)
21840 ? dw_scalar_form_constant
21841 : (dw_scalar_form_constant
21842 | dw_scalar_form_exprloc
21843 | dw_scalar_form_reference);
21844
21845 add_scalar_info (array_die, attr, info->stride, forms, &context);
21846 }
21847 }
21848 if (dwarf_version >= 5)
21849 {
21850 if (info->rank)
21851 {
21852 add_scalar_info (array_die, DW_AT_rank, info->rank,
21853 dw_scalar_form_constant
21854 | dw_scalar_form_exprloc, &context);
21855 subrange_tag = DW_TAG_generic_subrange;
21856 context.placeholder_arg = true;
21857 }
21858 }
21859
21860 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21861
21862 for (dim = 0; dim < info->ndimensions; dim++)
21863 {
21864 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21865
21866 if (info->dimen[dim].bounds_type)
21867 add_type_attribute (subrange_die,
21868 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21869 false, context_die);
21870 if (info->dimen[dim].lower_bound)
21871 add_bound_info (subrange_die, DW_AT_lower_bound,
21872 info->dimen[dim].lower_bound, &context);
21873 if (info->dimen[dim].upper_bound)
21874 add_bound_info (subrange_die, DW_AT_upper_bound,
21875 info->dimen[dim].upper_bound, &context);
21876 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21877 add_scalar_info (subrange_die, DW_AT_byte_stride,
21878 info->dimen[dim].stride,
21879 dw_scalar_form_constant
21880 | dw_scalar_form_exprloc
21881 | dw_scalar_form_reference,
21882 &context);
21883 }
21884
21885 gen_type_die (info->element_type, context_die);
21886 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21887 TREE_CODE (type) == ARRAY_TYPE
21888 && TYPE_REVERSE_STORAGE_ORDER (type),
21889 context_die);
21890
21891 if (get_AT (array_die, DW_AT_name))
21892 add_pubtype (type, array_die);
21893
21894 add_alignment_attribute (array_die, type);
21895 }
21896
21897 #if 0
21898 static void
21899 gen_entry_point_die (tree decl, dw_die_ref context_die)
21900 {
21901 tree origin = decl_ultimate_origin (decl);
21902 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21903
21904 if (origin != NULL)
21905 add_abstract_origin_attribute (decl_die, origin);
21906 else
21907 {
21908 add_name_and_src_coords_attributes (decl_die, decl);
21909 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21910 TYPE_UNQUALIFIED, false, context_die);
21911 }
21912
21913 if (DECL_ABSTRACT_P (decl))
21914 equate_decl_number_to_die (decl, decl_die);
21915 else
21916 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21917 }
21918 #endif
21919
21920 /* Walk through the list of incomplete types again, trying once more to
21921 emit full debugging info for them. */
21922
21923 static void
21924 retry_incomplete_types (void)
21925 {
21926 set_early_dwarf s;
21927 int i;
21928
21929 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21930 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21931 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21932 vec_safe_truncate (incomplete_types, 0);
21933 }
21934
21935 /* Determine what tag to use for a record type. */
21936
21937 static enum dwarf_tag
21938 record_type_tag (tree type)
21939 {
21940 if (! lang_hooks.types.classify_record)
21941 return DW_TAG_structure_type;
21942
21943 switch (lang_hooks.types.classify_record (type))
21944 {
21945 case RECORD_IS_STRUCT:
21946 return DW_TAG_structure_type;
21947
21948 case RECORD_IS_CLASS:
21949 return DW_TAG_class_type;
21950
21951 case RECORD_IS_INTERFACE:
21952 if (dwarf_version >= 3 || !dwarf_strict)
21953 return DW_TAG_interface_type;
21954 return DW_TAG_structure_type;
21955
21956 default:
21957 gcc_unreachable ();
21958 }
21959 }
21960
21961 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21962 include all of the information about the enumeration values also. Each
21963 enumerated type name/value is listed as a child of the enumerated type
21964 DIE. */
21965
21966 static dw_die_ref
21967 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21968 {
21969 dw_die_ref type_die = lookup_type_die (type);
21970 dw_die_ref orig_type_die = type_die;
21971
21972 if (type_die == NULL)
21973 {
21974 type_die = new_die (DW_TAG_enumeration_type,
21975 scope_die_for (type, context_die), type);
21976 equate_type_number_to_die (type, type_die);
21977 add_name_attribute (type_die, type_tag (type));
21978 if ((dwarf_version >= 4 || !dwarf_strict)
21979 && ENUM_IS_SCOPED (type))
21980 add_AT_flag (type_die, DW_AT_enum_class, 1);
21981 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
21982 add_AT_flag (type_die, DW_AT_declaration, 1);
21983 if (!dwarf_strict)
21984 add_AT_unsigned (type_die, DW_AT_encoding,
21985 TYPE_UNSIGNED (type)
21986 ? DW_ATE_unsigned
21987 : DW_ATE_signed);
21988 }
21989 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
21990 return type_die;
21991 else
21992 remove_AT (type_die, DW_AT_declaration);
21993
21994 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
21995 given enum type is incomplete, do not generate the DW_AT_byte_size
21996 attribute or the DW_AT_element_list attribute. */
21997 if (TYPE_SIZE (type))
21998 {
21999 tree link;
22000
22001 if (!ENUM_IS_OPAQUE (type))
22002 TREE_ASM_WRITTEN (type) = 1;
22003 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22004 add_byte_size_attribute (type_die, type);
22005 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22006 add_alignment_attribute (type_die, type);
22007 if ((dwarf_version >= 3 || !dwarf_strict)
22008 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22009 {
22010 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22011 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22012 context_die);
22013 }
22014 if (TYPE_STUB_DECL (type) != NULL_TREE)
22015 {
22016 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22017 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22018 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22019 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22020 }
22021
22022 /* If the first reference to this type was as the return type of an
22023 inline function, then it may not have a parent. Fix this now. */
22024 if (type_die->die_parent == NULL)
22025 add_child_die (scope_die_for (type, context_die), type_die);
22026
22027 for (link = TYPE_VALUES (type);
22028 link != NULL; link = TREE_CHAIN (link))
22029 {
22030 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22031 tree value = TREE_VALUE (link);
22032
22033 gcc_assert (!ENUM_IS_OPAQUE (type));
22034 add_name_attribute (enum_die,
22035 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22036
22037 if (TREE_CODE (value) == CONST_DECL)
22038 value = DECL_INITIAL (value);
22039
22040 if (simple_type_size_in_bits (TREE_TYPE (value))
22041 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22042 {
22043 /* For constant forms created by add_AT_unsigned, DWARF
22044 consumers (GDB, elfutils, etc.) always zero-extend
22045 the value.  Only when the actual value is negative
22046 do we need to use add_AT_int to generate a constant
22047 form that can represent negative values. */
22048 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22049 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22050 add_AT_unsigned (enum_die, DW_AT_const_value,
22051 (unsigned HOST_WIDE_INT) val);
22052 else
22053 add_AT_int (enum_die, DW_AT_const_value, val);
22054 }
22055 else
22056 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22057 that here. TODO: This should be re-worked to use correct
22058 signed/unsigned double tags for all cases. */
22059 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22060 }
22061
22062 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22063 if (TYPE_ARTIFICIAL (type)
22064 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22065 add_AT_flag (type_die, DW_AT_artificial, 1);
22066 }
22067 else
22068 add_AT_flag (type_die, DW_AT_declaration, 1);
22069
22070 add_pubtype (type, type_die);
22071
22072 return type_die;
22073 }
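
/* Illustrative example (editorial annotation, not part of the original
   source): for

       enum class color : unsigned char { red = 1, green = 2 };

   the code above is expected to emit a DW_TAG_enumeration_type DIE with
   DW_AT_enum_class (DWARF 4 or non-strict), DW_AT_byte_size 1, DW_AT_type
   referring to the underlying `unsigned char' type, and two
   DW_TAG_enumerator children whose DW_AT_const_value attributes are
   1 and 2.  */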
22074
22075 /* Generate a DIE to represent either a real live formal parameter decl or to
22076 represent just the type of some formal parameter position in some function
22077 type.
22078
22079 Note that this routine is a bit unusual because its argument may be a
22080 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22081 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22082 node. If it's the former then this function is being called to output a
22083 DIE to represent a formal parameter object (or some inlining thereof). If
22084 it's the latter, then this function is only being called to output a
22085 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22086 argument type of some subprogram type.
22087 If EMIT_NAME_P is true, name and source coordinate attributes
22088 are emitted. */
22089
22090 static dw_die_ref
22091 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22092 dw_die_ref context_die)
22093 {
22094 tree node_or_origin = node ? node : origin;
22095 tree ultimate_origin;
22096 dw_die_ref parm_die = NULL;
22097
22098 if (DECL_P (node_or_origin))
22099 {
22100 parm_die = lookup_decl_die (node);
22101
22102 /* If the contexts differ, we may not be talking about the same
22103 thing.
22104 ??? When in LTO the DIE parent is the "abstract" copy and the
22105 context_die is the specification "copy". But this whole block
22106 should eventually no longer be needed.  */
22107 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22108 {
22109 if (!DECL_ABSTRACT_P (node))
22110 {
22111 /* This can happen when creating an inlined instance, in
22112 which case we need to create a new DIE that will get
22113 annotated with DW_AT_abstract_origin. */
22114 parm_die = NULL;
22115 }
22116 else
22117 gcc_unreachable ();
22118 }
22119
22120 if (parm_die && parm_die->die_parent == NULL)
22121 {
22122 /* Check that parm_die already has the right attributes that
22123 we would have added below. If any attributes are
22124 missing, fall through to add them. */
22125 if (! DECL_ABSTRACT_P (node_or_origin)
22126 && !get_AT (parm_die, DW_AT_location)
22127 && !get_AT (parm_die, DW_AT_const_value))
22128 /* We are missing location info, and are about to add it. */
22129 ;
22130 else
22131 {
22132 add_child_die (context_die, parm_die);
22133 return parm_die;
22134 }
22135 }
22136 }
22137
22138 /* If we have a previously generated DIE, use it, unless this is a
22139 concrete instance (origin != NULL), in which case we need a new
22140 DIE with a corresponding DW_AT_abstract_origin. */
22141 bool reusing_die;
22142 if (parm_die && origin == NULL)
22143 reusing_die = true;
22144 else
22145 {
22146 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22147 reusing_die = false;
22148 }
22149
22150 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22151 {
22152 case tcc_declaration:
22153 ultimate_origin = decl_ultimate_origin (node_or_origin);
22154 if (node || ultimate_origin)
22155 origin = ultimate_origin;
22156
22157 if (reusing_die)
22158 goto add_location;
22159
22160 if (origin != NULL)
22161 add_abstract_origin_attribute (parm_die, origin);
22162 else if (emit_name_p)
22163 add_name_and_src_coords_attributes (parm_die, node);
22164 if (origin == NULL
22165 || (! DECL_ABSTRACT_P (node_or_origin)
22166 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22167 decl_function_context
22168 (node_or_origin))))
22169 {
22170 tree type = TREE_TYPE (node_or_origin);
22171 if (decl_by_reference_p (node_or_origin))
22172 add_type_attribute (parm_die, TREE_TYPE (type),
22173 TYPE_UNQUALIFIED,
22174 false, context_die);
22175 else
22176 add_type_attribute (parm_die, type,
22177 decl_quals (node_or_origin),
22178 false, context_die);
22179 }
22180 if (origin == NULL && DECL_ARTIFICIAL (node))
22181 add_AT_flag (parm_die, DW_AT_artificial, 1);
22182 add_location:
22183 if (node && node != origin)
22184 equate_decl_number_to_die (node, parm_die);
22185 if (! DECL_ABSTRACT_P (node_or_origin))
22186 add_location_or_const_value_attribute (parm_die, node_or_origin,
22187 node == NULL);
22188
22189 break;
22190
22191 case tcc_type:
22192 /* We were called with some kind of a ..._TYPE node. */
22193 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22194 context_die);
22195 break;
22196
22197 default:
22198 gcc_unreachable ();
22199 }
22200
22201 return parm_die;
22202 }
22203
22204 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22205 children DW_TAG_formal_parameter DIEs representing the arguments of the
22206 parameter pack.
22207
22208 PARM_PACK must be a function parameter pack.
22209 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22210 must point to the subsequent arguments of the function PACK_ARG belongs to.
22211 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22212 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22213 following the last one for which a DIE was generated. */
22214
22215 static dw_die_ref
22216 gen_formal_parameter_pack_die (tree parm_pack,
22217 tree pack_arg,
22218 dw_die_ref subr_die,
22219 tree *next_arg)
22220 {
22221 tree arg;
22222 dw_die_ref parm_pack_die;
22223
22224 gcc_assert (parm_pack
22225 && lang_hooks.function_parameter_pack_p (parm_pack)
22226 && subr_die);
22227
22228 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22229 add_src_coords_attributes (parm_pack_die, parm_pack);
22230
22231 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22232 {
22233 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22234 parm_pack))
22235 break;
22236 gen_formal_parameter_die (arg, NULL,
22237 false /* Don't emit name attribute. */,
22238 parm_pack_die);
22239 }
22240 if (next_arg)
22241 *next_arg = arg;
22242 return parm_pack_die;
22243 }
22244
22245 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22246 at the end of an (ANSI prototyped) formal parameter list.  */
22247
22248 static void
22249 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22250 {
22251 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22252 }
22253
22254 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22255 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22256 parameters as specified in some function type specification (except for
22257 those which appear as part of a function *definition*). */
22258
22259 static void
22260 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22261 {
22262 tree link;
22263 tree formal_type = NULL;
22264 tree first_parm_type;
22265 tree arg;
22266
22267 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22268 {
22269 arg = DECL_ARGUMENTS (function_or_method_type);
22270 function_or_method_type = TREE_TYPE (function_or_method_type);
22271 }
22272 else
22273 arg = NULL_TREE;
22274
22275 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22276
22277 /* Make our first pass over the list of formal parameter types and output a
22278 DW_TAG_formal_parameter DIE for each one. */
22279 for (link = first_parm_type; link; )
22280 {
22281 dw_die_ref parm_die;
22282
22283 formal_type = TREE_VALUE (link);
22284 if (formal_type == void_type_node)
22285 break;
22286
22287 /* Output a (nameless) DIE to represent the formal parameter itself. */
22288 parm_die = gen_formal_parameter_die (formal_type, NULL,
22289 true /* Emit name attribute. */,
22290 context_die);
22291 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22292 && link == first_parm_type)
22293 {
22294 add_AT_flag (parm_die, DW_AT_artificial, 1);
22295 if (dwarf_version >= 3 || !dwarf_strict)
22296 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22297 }
22298 else if (arg && DECL_ARTIFICIAL (arg))
22299 add_AT_flag (parm_die, DW_AT_artificial, 1);
22300
22301 link = TREE_CHAIN (link);
22302 if (arg)
22303 arg = DECL_CHAIN (arg);
22304 }
22305
22306 /* If this function type has an ellipsis, add a
22307 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22308 if (formal_type != void_type_node)
22309 gen_unspecified_parameters_die (function_or_method_type, context_die);
22310
22311 /* Make our second (and final) pass over the list of formal parameter types
22312 and output DIEs to represent those types (as necessary). */
22313 for (link = TYPE_ARG_TYPES (function_or_method_type);
22314 link && TREE_VALUE (link);
22315 link = TREE_CHAIN (link))
22316 gen_type_die (TREE_VALUE (link), context_die);
22317 }
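
/* Illustrative note (editorial annotation, not part of the original source):
   for a C++ member function type such as `void C::m (int)', the first
   DW_TAG_formal_parameter emitted above stands for the implicit `this'
   pointer: it is marked DW_AT_artificial and, for DWARF 3 or non-strict
   output, the enclosing DIE gets DW_AT_object_pointer referring to it.  The
   `int' parameter follows as a regular nameless formal parameter DIE.  */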
22318
22319 /* We want to generate the DIE for TYPE so that we can generate the
22320 die for MEMBER, which has been defined; we will need to refer back
22321 to the member declaration nested within TYPE. If we're trying to
22322 generate minimal debug info for TYPE, processing TYPE won't do the
22323 trick; we need to attach the member declaration by hand. */
22324
22325 static void
22326 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22327 {
22328 gen_type_die (type, context_die);
22329
22330 /* If we're trying to avoid duplicate debug info, we may not have
22331 emitted the member decl for this function. Emit it now. */
22332 if (TYPE_STUB_DECL (type)
22333 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22334 && ! lookup_decl_die (member))
22335 {
22336 dw_die_ref type_die;
22337 gcc_assert (!decl_ultimate_origin (member));
22338
22339 type_die = lookup_type_die_strip_naming_typedef (type);
22340 if (TREE_CODE (member) == FUNCTION_DECL)
22341 gen_subprogram_die (member, type_die);
22342 else if (TREE_CODE (member) == FIELD_DECL)
22343 {
22344 /* Ignore the nameless fields that are used to skip bits but handle
22345 C++ anonymous unions and structs. */
22346 if (DECL_NAME (member) != NULL_TREE
22347 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22348 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22349 {
22350 struct vlr_context vlr_ctx = {
22351 DECL_CONTEXT (member), /* struct_type */
22352 NULL_TREE /* variant_part_offset */
22353 };
22354 gen_type_die (member_declared_type (member), type_die);
22355 gen_field_die (member, &vlr_ctx, type_die);
22356 }
22357 }
22358 else
22359 gen_variable_die (member, NULL_TREE, type_die);
22360 }
22361 }
22362 \f
22363 /* Forward declare these functions, because they are mutually recursive
22364 with their set_block_* pairing functions. */
22365 static void set_decl_origin_self (tree);
22366
22367 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22368 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22369 that it points to the node itself, thus indicating that the node is its
22370 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22371 the given node is NULL, recursively descend the decl/block tree which
22372 it is the root of, and for each other ..._DECL or BLOCK node contained
22373 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22374 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22375 values to point to themselves. */
22376
22377 static void
22378 set_block_origin_self (tree stmt)
22379 {
22380 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22381 {
22382 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22383
22384 {
22385 tree local_decl;
22386
22387 for (local_decl = BLOCK_VARS (stmt);
22388 local_decl != NULL_TREE;
22389 local_decl = DECL_CHAIN (local_decl))
22390 /* Do not recurse on nested functions since the inlining status
22391 of parent and child can be different as per the DWARF spec. */
22392 if (TREE_CODE (local_decl) != FUNCTION_DECL
22393 && !DECL_EXTERNAL (local_decl))
22394 set_decl_origin_self (local_decl);
22395 }
22396
22397 {
22398 tree subblock;
22399
22400 for (subblock = BLOCK_SUBBLOCKS (stmt);
22401 subblock != NULL_TREE;
22402 subblock = BLOCK_CHAIN (subblock))
22403 set_block_origin_self (subblock); /* Recurse. */
22404 }
22405 }
22406 }
22407
22408 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22409 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22410 node so that it points to the node itself, thus indicating that the
22411 node represents its own (abstract) origin. Additionally, if the
22412 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22413 the decl/block tree of which the given node is the root, and for
22414 each other ..._DECL or BLOCK node contained therein whose
22415 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22416 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22417 point to themselves. */
22418
22419 static void
22420 set_decl_origin_self (tree decl)
22421 {
22422 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22423 {
22424 DECL_ABSTRACT_ORIGIN (decl) = decl;
22425 if (TREE_CODE (decl) == FUNCTION_DECL)
22426 {
22427 tree arg;
22428
22429 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22430 DECL_ABSTRACT_ORIGIN (arg) = arg;
22431 if (DECL_INITIAL (decl) != NULL_TREE
22432 && DECL_INITIAL (decl) != error_mark_node)
22433 set_block_origin_self (DECL_INITIAL (decl));
22434 }
22435 }
22436 }
22437 \f
22438 /* Mark the early DIE for DECL as the abstract instance. */
22439
22440 static void
22441 dwarf2out_abstract_function (tree decl)
22442 {
22443 dw_die_ref old_die;
22444
22445 /* Make sure we have the actual abstract inline, not a clone. */
22446 decl = DECL_ORIGIN (decl);
22447
22448 if (DECL_IGNORED_P (decl))
22449 return;
22450
22451 old_die = lookup_decl_die (decl);
22452 /* With early debug we always have an old DIE unless we are in LTO
22453         and the user did not compile with debug info but only linked with it.  */
22454 if (in_lto_p && ! old_die)
22455 return;
22456 gcc_assert (old_die != NULL);
22457 if (get_AT (old_die, DW_AT_inline)
22458 || get_AT (old_die, DW_AT_abstract_origin))
22459 /* We've already generated the abstract instance. */
22460 return;
22461
22462 /* Go ahead and put DW_AT_inline on the DIE. */
22463 if (DECL_DECLARED_INLINE_P (decl))
22464 {
22465 if (cgraph_function_possibly_inlined_p (decl))
22466 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22467 else
22468 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22469 }
22470 else
22471 {
22472 if (cgraph_function_possibly_inlined_p (decl))
22473 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22474 else
22475 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22476 }
22477
22478 if (DECL_DECLARED_INLINE_P (decl)
22479 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22480 add_AT_flag (old_die, DW_AT_artificial, 1);
22481
22482 set_decl_origin_self (decl);
22483 }
22484
22485 /* Helper function of premark_used_types() which gets called through
22486       hash_set::traverse.
22487
22488       Marks the DIE of the given TYPE as perennial, so it never gets
22489 marked as unused by prune_unused_types. */
22490
22491 bool
22492 premark_used_types_helper (tree const &type, void *)
22493 {
22494 dw_die_ref die;
22495
22496 die = lookup_type_die (type);
22497 if (die != NULL)
22498 die->die_perennial_p = 1;
22499 return true;
22500 }
22501
22502 /* Helper function of premark_types_used_by_global_vars which gets called
22503 through htab_traverse.
22504
22505 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22506 marked as unused by prune_unused_types. The DIE of the type is marked
22507 only if the global variable using the type will actually be emitted. */
22508
22509 int
22510 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22511 void *)
22512 {
22513 struct types_used_by_vars_entry *entry;
22514 dw_die_ref die;
22515
22516 entry = (struct types_used_by_vars_entry *) *slot;
22517 gcc_assert (entry->type != NULL
22518 && entry->var_decl != NULL);
22519 die = lookup_type_die (entry->type);
22520 if (die)
22521 {
22522 /* Ask cgraph if the global variable really is to be emitted.
22523 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22524 varpool_node *node = varpool_node::get (entry->var_decl);
22525 if (node && node->definition)
22526 {
22527 die->die_perennial_p = 1;
22528 /* Keep the parent DIEs as well. */
22529 while ((die = die->die_parent) && die->die_perennial_p == 0)
22530 die->die_perennial_p = 1;
22531 }
22532 }
22533 return 1;
22534 }
22535
22536 /* Mark all members of used_types_hash as perennial. */
22537
22538 static void
22539 premark_used_types (struct function *fun)
22540 {
22541 if (fun && fun->used_types_hash)
22542 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22543 }
22544
22545 /* Mark all members of types_used_by_vars_entry as perennial. */
22546
22547 static void
22548 premark_types_used_by_global_vars (void)
22549 {
22550 if (types_used_by_vars_hash)
22551 types_used_by_vars_hash
22552 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22553 }
22554
22555 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22556 for CA_LOC call arg loc node. */
22557
22558 static dw_die_ref
22559 gen_call_site_die (tree decl, dw_die_ref subr_die,
22560 struct call_arg_loc_node *ca_loc)
22561 {
22562 dw_die_ref stmt_die = NULL, die;
22563 tree block = ca_loc->block;
22564
22565 while (block
22566 && block != DECL_INITIAL (decl)
22567 && TREE_CODE (block) == BLOCK)
22568 {
22569 stmt_die = BLOCK_DIE (block);
22570 if (stmt_die)
22571 break;
22572 block = BLOCK_SUPERCONTEXT (block);
22573 }
22574 if (stmt_die == NULL)
22575 stmt_die = subr_die;
22576 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22577 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22578 if (ca_loc->tail_call_p)
22579 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22580 if (ca_loc->symbol_ref)
22581 {
22582 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22583 if (tdie)
22584 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22585 else
22586 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22587 false);
22588 }
22589 return die;
22590 }
22591
22592 /* Generate a DIE to represent a declared function (either file-scope or
22593 block-local). */
22594
22595 static void
22596 gen_subprogram_die (tree decl, dw_die_ref context_die)
22597 {
22598 tree origin = decl_ultimate_origin (decl);
22599 dw_die_ref subr_die;
22600 dw_die_ref old_die = lookup_decl_die (decl);
22601
22602 /* This function gets called multiple times for different stages of
22603 the debug process. For example, for func() in this code:
22604
22605 namespace S
22606 {
22607 void func() { ... }
22608 }
22609
22610 ...we get called 4 times. Twice in early debug and twice in
22611 late debug:
22612
22613 Early debug
22614 -----------
22615
22616 1. Once while generating func() within the namespace. This is
22617 the declaration. The declaration bit below is set, as the
22618 context is the namespace.
22619
22620 A new DIE will be generated with DW_AT_declaration set.
22621
22622 2. Once for func() itself. This is the specification. The
22623 declaration bit below is clear as the context is the CU.
22624
22625 We will use the cached DIE from (1) to create a new DIE with
22626 DW_AT_specification pointing to the declaration in (1).
22627
22628 Late debug via rest_of_handle_final()
22629 -------------------------------------
22630
22631         3. Once while generating func() within the namespace.  This is also the
22632 declaration, as in (1), but this time we will early exit below
22633 as we have a cached DIE and a declaration needs no additional
22634 annotations (no locations), as the source declaration line
22635 info is enough.
22636
22637 4. Once for func() itself. As in (2), this is the specification,
22638 but this time we will re-use the cached DIE, and just annotate
22639 it with the location information that should now be available.
22640
22641 For something without namespaces, but with abstract instances, we
22642      are also called multiple times:
22643
22644 class Base
22645 {
22646 public:
22647 Base (); // constructor declaration (1)
22648 };
22649
22650 Base::Base () { } // constructor specification (2)
22651
22652 Early debug
22653 -----------
22654
22655 1. Once for the Base() constructor by virtue of it being a
22656 member of the Base class. This is done via
22657 rest_of_type_compilation.
22658
22659 This is a declaration, so a new DIE will be created with
22660 DW_AT_declaration.
22661
22662 2. Once for the Base() constructor definition, but this time
22663 while generating the abstract instance of the base
22664 constructor (__base_ctor) which is being generated via early
22665 debug of reachable functions.
22666
22667 Even though we have a cached version of the declaration (1),
22668 we will create a DW_AT_specification of the declaration DIE
22669 in (1).
22670
22671 3. Once for the __base_ctor itself, but this time, we generate
22672            a DW_AT_abstract_origin version of the DW_AT_specification in
22673 (2).
22674
22675 Late debug via rest_of_handle_final
22676 -----------------------------------
22677
22678 4. One final time for the __base_ctor (which will have a cached
22679            DIE with DW_AT_abstract_origin created in (3)).  This time,
22680 we will just annotate the location information now
22681 available.
22682 */
22683 int declaration = (current_function_decl != decl
22684 || class_or_namespace_scope_p (context_die));
22685
22686 /* A declaration that has been previously dumped needs no
22687 additional information. */
22688 if (old_die && declaration)
22689 return;
22690
22691 /* Now that the C++ front end lazily declares artificial member fns, we
22692 might need to retrofit the declaration into its class. */
22693 if (!declaration && !origin && !old_die
22694 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22695 && !class_or_namespace_scope_p (context_die)
22696 && debug_info_level > DINFO_LEVEL_TERSE)
22697 old_die = force_decl_die (decl);
22698
22699 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22700 if (origin != NULL)
22701 {
22702 gcc_assert (!declaration || local_scope_p (context_die));
22703
22704 /* Fixup die_parent for the abstract instance of a nested
22705 inline function. */
22706 if (old_die && old_die->die_parent == NULL)
22707 add_child_die (context_die, old_die);
22708
22709 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22710 {
22711 /* If we have a DW_AT_abstract_origin we have a working
22712 cached version. */
22713 subr_die = old_die;
22714 }
22715 else
22716 {
22717 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22718 add_abstract_origin_attribute (subr_die, origin);
22719 /* This is where the actual code for a cloned function is.
22720 Let's emit linkage name attribute for it. This helps
22721              debuggers to, e.g., set breakpoints into
22722 constructors/destructors when the user asks "break
22723 K::K". */
22724 add_linkage_name (subr_die, decl);
22725 }
22726 }
22727 /* A cached copy, possibly from early dwarf generation. Reuse as
22728 much as possible. */
22729 else if (old_die)
22730 {
22731 if (!get_AT_flag (old_die, DW_AT_declaration)
22732 /* We can have a normal definition following an inline one in the
22733 case of redefinition of GNU C extern inlines.
22734 It seems reasonable to use AT_specification in this case. */
22735 && !get_AT (old_die, DW_AT_inline))
22736 {
22737 /* Detect and ignore this case, where we are trying to output
22738 something we have already output. */
22739 if (get_AT (old_die, DW_AT_low_pc)
22740 || get_AT (old_die, DW_AT_ranges))
22741 return;
22742
22743 /* If we have no location information, this must be a
22744 partially generated DIE from early dwarf generation.
22745 Fall through and generate it. */
22746 }
22747
22748 /* If the definition comes from the same place as the declaration,
22749 maybe use the old DIE. We always want the DIE for this function
22750 that has the *_pc attributes to be under comp_unit_die so the
22751 debugger can find it. We also need to do this for abstract
22752 instances of inlines, since the spec requires the out-of-line copy
22753 to have the same parent. For local class methods, this doesn't
22754 apply; we just use the old DIE. */
22755 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22756 struct dwarf_file_data * file_index = lookup_filename (s.file);
22757 if (((is_unit_die (old_die->die_parent)
22758 /* This condition fixes the inconsistency/ICE with the
22759 following Fortran test (or some derivative thereof) while
22760 building libgfortran:
22761
22762 module some_m
22763 contains
22764 logical function funky (FLAG)
22765 funky = .true.
22766 end function
22767 end module
22768 */
22769 || (old_die->die_parent
22770 && old_die->die_parent->die_tag == DW_TAG_module)
22771 || context_die == NULL)
22772 && (DECL_ARTIFICIAL (decl)
22773 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22774 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22775 == (unsigned) s.line)
22776 && (!debug_column_info
22777 || s.column == 0
22778 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22779 == (unsigned) s.column)))))
22780 /* With LTO if there's an abstract instance for
22781              the old DIE, this is a concrete instance, and
22782              thus we re-use the DIE.  */
22783 || get_AT (old_die, DW_AT_abstract_origin))
22784 {
22785 subr_die = old_die;
22786
22787 /* Clear out the declaration attribute, but leave the
22788 parameters so they can be augmented with location
22789 information later. Unless this was a declaration, in
22790 which case, wipe out the nameless parameters and recreate
22791 them further down. */
22792 if (remove_AT (subr_die, DW_AT_declaration))
22793 {
22794
22795 remove_AT (subr_die, DW_AT_object_pointer);
22796 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22797 }
22798 }
22799 /* Make a specification pointing to the previously built
22800 declaration. */
22801 else
22802 {
22803 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22804 add_AT_specification (subr_die, old_die);
22805 add_pubname (decl, subr_die);
22806 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22807 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22808 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22809 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22810 if (debug_column_info
22811 && s.column
22812 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22813 != (unsigned) s.column))
22814 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22815
22816 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22817              emit the real type on the definition DIE.  */
22818 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22819 {
22820 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22821 if (die == auto_die || die == decltype_auto_die)
22822 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22823 TYPE_UNQUALIFIED, false, context_die);
22824 }
22825
22826 /* When we process the method declaration, we haven't seen
22827 the out-of-class defaulted definition yet, so we have to
22828 recheck now. */
22829 if ((dwarf_version >= 5 || ! dwarf_strict)
22830 && !get_AT (subr_die, DW_AT_defaulted))
22831 {
22832 int defaulted
22833 = lang_hooks.decls.decl_dwarf_attribute (decl,
22834 DW_AT_defaulted);
22835 if (defaulted != -1)
22836 {
22837 /* Other values must have been handled before. */
22838 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22839 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22840 }
22841 }
22842 }
22843 }
22844 /* Create a fresh DIE for anything else. */
22845 else
22846 {
22847 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22848
22849 if (TREE_PUBLIC (decl))
22850 add_AT_flag (subr_die, DW_AT_external, 1);
22851
22852 add_name_and_src_coords_attributes (subr_die, decl);
22853 add_pubname (decl, subr_die);
22854 if (debug_info_level > DINFO_LEVEL_TERSE)
22855 {
22856 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22857 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22858 TYPE_UNQUALIFIED, false, context_die);
22859 }
22860
22861 add_pure_or_virtual_attribute (subr_die, decl);
22862 if (DECL_ARTIFICIAL (decl))
22863 add_AT_flag (subr_die, DW_AT_artificial, 1);
22864
22865 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22866 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22867
22868 add_alignment_attribute (subr_die, decl);
22869
22870 add_accessibility_attribute (subr_die, decl);
22871 }
22872
22873 /* Unless we have an existing non-declaration DIE, equate the new
22874 DIE. */
22875 if (!old_die || is_declaration_die (old_die))
22876 equate_decl_number_to_die (decl, subr_die);
22877
22878 if (declaration)
22879 {
22880 if (!old_die || !get_AT (old_die, DW_AT_inline))
22881 {
22882 add_AT_flag (subr_die, DW_AT_declaration, 1);
22883
22884 /* If this is an explicit function declaration then generate
22885 a DW_AT_explicit attribute. */
22886 if ((dwarf_version >= 3 || !dwarf_strict)
22887 && lang_hooks.decls.decl_dwarf_attribute (decl,
22888 DW_AT_explicit) == 1)
22889 add_AT_flag (subr_die, DW_AT_explicit, 1);
22890
22891 /* If this is a C++11 deleted special function member then generate
22892 a DW_AT_deleted attribute. */
22893 if ((dwarf_version >= 5 || !dwarf_strict)
22894 && lang_hooks.decls.decl_dwarf_attribute (decl,
22895 DW_AT_deleted) == 1)
22896 add_AT_flag (subr_die, DW_AT_deleted, 1);
22897
22898 /* If this is a C++11 defaulted special function member then
22899 generate a DW_AT_defaulted attribute. */
22900 if (dwarf_version >= 5 || !dwarf_strict)
22901 {
22902 int defaulted
22903 = lang_hooks.decls.decl_dwarf_attribute (decl,
22904 DW_AT_defaulted);
22905 if (defaulted != -1)
22906 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22907 }
22908
22909 /* If this is a C++11 non-static member function with & ref-qualifier
22910 then generate a DW_AT_reference attribute. */
22911 if ((dwarf_version >= 5 || !dwarf_strict)
22912 && lang_hooks.decls.decl_dwarf_attribute (decl,
22913 DW_AT_reference) == 1)
22914 add_AT_flag (subr_die, DW_AT_reference, 1);
22915
22916 /* If this is a C++11 non-static member function with &&
22917 ref-qualifier then generate a DW_AT_reference attribute. */
22918 if ((dwarf_version >= 5 || !dwarf_strict)
22919 && lang_hooks.decls.decl_dwarf_attribute (decl,
22920 DW_AT_rvalue_reference)
22921 == 1)
22922 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22923 }
22924 }
22925    /* For non-DECL_EXTERNAL functions, if range information is available, fill
22926 the DIE with it. */
22927 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22928 {
22929 HOST_WIDE_INT cfa_fb_offset;
22930
22931 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22932
22933 if (!crtl->has_bb_partition)
22934 {
22935 dw_fde_ref fde = fun->fde;
22936 if (fde->dw_fde_begin)
22937 {
22938 /* We have already generated the labels. */
22939 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22940 fde->dw_fde_end, false);
22941 }
22942 else
22943 {
22944 /* Create start/end labels and add the range. */
22945 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22946 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22947 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22948 current_function_funcdef_no);
22949 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22950 current_function_funcdef_no);
22951 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22952 false);
22953 }
22954
22955 #if VMS_DEBUGGING_INFO
22956 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22957 Section 2.3 Prologue and Epilogue Attributes:
22958 When a breakpoint is set on entry to a function, it is generally
22959 desirable for execution to be suspended, not on the very first
22960 instruction of the function, but rather at a point after the
22961 function's frame has been set up, after any language defined local
22962 declaration processing has been completed, and before execution of
22963 the first statement of the function begins. Debuggers generally
22964 cannot properly determine where this point is. Similarly for a
22965 breakpoint set on exit from a function. The prologue and epilogue
22966 attributes allow a compiler to communicate the location(s) to use. */
22967
22968 {
22969 if (fde->dw_fde_vms_end_prologue)
22970 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22971 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22972
22973 if (fde->dw_fde_vms_begin_epilogue)
22974 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22975 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22976 }
22977 #endif
22978
22979 }
22980 else
22981 {
22982 /* Generate pubnames entries for the split function code ranges. */
22983 dw_fde_ref fde = fun->fde;
22984
22985 if (fde->dw_fde_second_begin)
22986 {
22987 if (dwarf_version >= 3 || !dwarf_strict)
22988 {
22989 /* We should use ranges for non-contiguous code section
22990 addresses. Use the actual code range for the initial
22991 section, since the HOT/COLD labels might precede an
22992 alignment offset. */
22993 bool range_list_added = false;
22994 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
22995 fde->dw_fde_end, &range_list_added,
22996 false);
22997 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
22998 fde->dw_fde_second_end,
22999 &range_list_added, false);
23000 if (range_list_added)
23001 add_ranges (NULL);
23002 }
23003 else
23004 {
23005                   /* There is no real support in DWARF 2 for this, so we make
23006                      a workaround.  First, emit the pub name for the segment
23007                      containing the function label.  Then make and emit a
23008                      simplified subprogram DIE for the second segment with the
23009                      name prefixed by __second_sect_of_.  We use the same
23010                      linkage name for the second DIE so that gdb will find both
23011                      sections when given "b foo".  */
23012 const char *name = NULL;
23013 tree decl_name = DECL_NAME (decl);
23014 dw_die_ref seg_die;
23015
23016 /* Do the 'primary' section. */
23017 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23018 fde->dw_fde_end, false);
23019
23020 /* Build a minimal DIE for the secondary section. */
23021 seg_die = new_die (DW_TAG_subprogram,
23022 subr_die->die_parent, decl);
23023
23024 if (TREE_PUBLIC (decl))
23025 add_AT_flag (seg_die, DW_AT_external, 1);
23026
23027 if (decl_name != NULL
23028 && IDENTIFIER_POINTER (decl_name) != NULL)
23029 {
23030 name = dwarf2_name (decl, 1);
23031 if (! DECL_ARTIFICIAL (decl))
23032 add_src_coords_attributes (seg_die, decl);
23033
23034 add_linkage_name (seg_die, decl);
23035 }
23036 gcc_assert (name != NULL);
23037 add_pure_or_virtual_attribute (seg_die, decl);
23038 if (DECL_ARTIFICIAL (decl))
23039 add_AT_flag (seg_die, DW_AT_artificial, 1);
23040
23041 name = concat ("__second_sect_of_", name, NULL);
23042 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23043 fde->dw_fde_second_end, false);
23044 add_name_attribute (seg_die, name);
23045 if (want_pubnames ())
23046 add_pubname_string (name, seg_die);
23047 }
23048 }
23049 else
23050 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23051 false);
23052 }
23053
23054 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23055
23056 /* We define the "frame base" as the function's CFA. This is more
23057 convenient for several reasons: (1) It's stable across the prologue
23058 and epilogue, which makes it better than just a frame pointer,
23059 (2) With dwarf3, there exists a one-byte encoding that allows us
23060 to reference the .debug_frame data by proxy, but failing that,
23061 (3) We can at least reuse the code inspection and interpretation
23062 code that determines the CFA position at various points in the
23063 function. */
23064 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23065 {
23066 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23067 add_AT_loc (subr_die, DW_AT_frame_base, op);
23068 }
23069 else
23070 {
23071 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23072 if (list->dw_loc_next)
23073 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23074 else
23075 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23076 }
23077
23078 /* Compute a displacement from the "steady-state frame pointer" to
23079 the CFA. The former is what all stack slots and argument slots
23080 will reference in the rtl; the latter is what we've told the
23081 debugger about. We'll need to adjust all frame_base references
23082 by this displacement. */
23083 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23084
23085 if (fun->static_chain_decl)
23086 {
23087 /* DWARF requires here a location expression that computes the
23088 address of the enclosing subprogram's frame base. The machinery
23089 in tree-nested.c is supposed to store this specific address in the
23090 last field of the FRAME record. */
23091 const tree frame_type
23092 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23093 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23094
23095 tree fb_expr
23096 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23097 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23098 fb_expr, fb_decl, NULL_TREE);
23099
23100 add_AT_location_description (subr_die, DW_AT_static_link,
23101 loc_list_from_tree (fb_expr, 0, NULL));
23102 }
23103
23104 resolve_variable_values ();
23105 }
23106
23107    /* Generate child DIEs for template parameters.  */
23108 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23109 gen_generic_params_dies (decl);
23110
23111    /* Now output descriptions of the arguments for this function.  This gets
23112       (unnecessarily?) complex because the DECL_ARGUMENTS list for a
23113       FUNCTION_DECL doesn't indicate cases where there was a trailing
23114       `...' at the end of the formal parameter list.  In order to find out
23115       whether there was a trailing ellipsis, we must instead look at the
23116       type associated with the FUNCTION_DECL.  This will be a node of type
23117       FUNCTION_TYPE.  If the chain of type nodes hanging off of this
23118       FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23119       an ellipsis at the end.  */
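   /* For example (an illustrative sketch; the declarations are made up):

          void f (int);       TYPE_ARG_TYPES ends with void_type_node, so
                              no DW_TAG_unspecified_parameters DIE is needed.
          void g (int, ...);  the chain does not end with void_type_node, so
                              gen_unspecified_parameters_die adds one below.  */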
23120
23121 /* In the case where we are describing a mere function declaration, all we
23122 need to do here (and all we *can* do here) is to describe the *types* of
23123 its formal parameters. */
23124 if (debug_info_level <= DINFO_LEVEL_TERSE)
23125 ;
23126 else if (declaration)
23127 gen_formal_types_die (decl, subr_die);
23128 else
23129 {
23130 /* Generate DIEs to represent all known formal parameters. */
23131 tree parm = DECL_ARGUMENTS (decl);
23132 tree generic_decl = early_dwarf
23133 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23134 tree generic_decl_parm = generic_decl
23135 ? DECL_ARGUMENTS (generic_decl)
23136 : NULL;
23137
23138 /* Now we want to walk the list of parameters of the function and
23139 emit their relevant DIEs.
23140
23141 We consider the case of DECL being an instance of a generic function
23142 as well as it being a normal function.
23143
23144 If DECL is an instance of a generic function we walk the
23145 parameters of the generic function declaration _and_ the parameters of
23146 DECL itself. This is useful because we want to emit specific DIEs for
23147 function parameter packs and those are declared as part of the
23148 generic function declaration. In that particular case,
23149 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23150 That DIE has children DIEs representing the set of arguments
23151 of the pack. Note that the set of pack arguments can be empty.
23152 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23153         child DIEs.
23154
23155 Otherwise, we just consider the parameters of DECL. */
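      /* For example (an illustrative sketch; the names are made up):

             template <typename... Args>
             void f (int first, Args... rest);

         For an instantiation such as f<char, long>, FIRST yields a plain
         DW_TAG_formal_parameter DIE, while REST yields a
         DW_TAG_GNU_formal_parameter_pack DIE whose children describe the
         char and long pack arguments (the set may be empty).  */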
23156 while (generic_decl_parm || parm)
23157 {
23158 if (generic_decl_parm
23159 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23160 gen_formal_parameter_pack_die (generic_decl_parm,
23161 parm, subr_die,
23162 &parm);
23163 else if (parm)
23164 {
23165 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23166
23167 if (early_dwarf
23168 && parm == DECL_ARGUMENTS (decl)
23169 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23170 && parm_die
23171 && (dwarf_version >= 3 || !dwarf_strict))
23172 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23173
23174 parm = DECL_CHAIN (parm);
23175 }
23176 else if (parm)
23177 parm = DECL_CHAIN (parm);
23178
23179 if (generic_decl_parm)
23180 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23181 }
23182
23183 /* Decide whether we need an unspecified_parameters DIE at the end.
23184         There are two cases where we need one: 1) the ANSI `...' declaration,
23185         which is detectable when the end of the argument list is not a
23186         void_type_node; 2) an unprototyped function declaration (not a
23187         definition), which just means that we have no info about the
23188         parameters at all.  */
23189 if (early_dwarf)
23190 {
23191 if (prototype_p (TREE_TYPE (decl)))
23192 {
23193             /* This is the prototyped case; check for a trailing ellipsis.  */
23194 if (stdarg_p (TREE_TYPE (decl)))
23195 gen_unspecified_parameters_die (decl, subr_die);
23196 }
23197 else if (DECL_INITIAL (decl) == NULL_TREE)
23198 gen_unspecified_parameters_die (decl, subr_die);
23199 }
23200 }
23201
23202 if (subr_die != old_die)
23203 /* Add the calling convention attribute if requested. */
23204 add_calling_convention_attribute (subr_die, decl);
23205
23206 /* Output Dwarf info for all of the stuff within the body of the function
23207 (if it has one - it may be just a declaration).
23208
23209 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23210 a function. This BLOCK actually represents the outermost binding contour
23211 for the function, i.e. the contour in which the function's formal
23212 parameters and labels get declared. Curiously, it appears that the front
23213 end doesn't actually put the PARM_DECL nodes for the current function onto
23214      the BLOCK_VARS list for this outer scope; they are strung off the
23215      DECL_ARGUMENTS list for the function instead.
23216
23217 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23218 the LABEL_DECL nodes for the function however, and we output DWARF info
23219 for those in decls_for_scope. Just within the `outer_scope' there will be
23220 a BLOCK node representing the function's outermost pair of curly braces,
23221 and any blocks used for the base and member initializers of a C++
23222 constructor function. */
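  /* For example (an illustrative sketch):

         void f (int p)
         {
           int x;
           lab:;
         }

     Here P is found on DECL_ARGUMENTS (f) rather than on the outer
     scope's BLOCK_VARS, the LABEL_DECL for LAB is on the outer scope's
     BLOCK_VARS, and X lives in the nested BLOCK for the body's braces;
     decls_for_scope below walks all of them.  */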
23223 tree outer_scope = DECL_INITIAL (decl);
23224 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23225 {
23226 int call_site_note_count = 0;
23227 int tail_call_site_note_count = 0;
23228
23229 /* Emit a DW_TAG_variable DIE for a named return value. */
23230 if (DECL_NAME (DECL_RESULT (decl)))
23231 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23232
23233 /* The first time through decls_for_scope we will generate the
23234 DIEs for the locals. The second time, we fill in the
23235 location info. */
23236 decls_for_scope (outer_scope, subr_die);
23237
23238 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23239 {
23240 struct call_arg_loc_node *ca_loc;
23241 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23242 {
23243 dw_die_ref die = NULL;
23244 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23245 rtx arg, next_arg;
23246
23247 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23248 ? XEXP (ca_loc->call_arg_loc_note, 0)
23249 : NULL_RTX);
23250 arg; arg = next_arg)
23251 {
23252 dw_loc_descr_ref reg, val;
23253 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23254 dw_die_ref cdie, tdie = NULL;
23255
23256 next_arg = XEXP (arg, 1);
23257 if (REG_P (XEXP (XEXP (arg, 0), 0))
23258 && next_arg
23259 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23260 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23261 && REGNO (XEXP (XEXP (arg, 0), 0))
23262 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23263 next_arg = XEXP (next_arg, 1);
23264 if (mode == VOIDmode)
23265 {
23266 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23267 if (mode == VOIDmode)
23268 mode = GET_MODE (XEXP (arg, 0));
23269 }
23270 if (mode == VOIDmode || mode == BLKmode)
23271 continue;
23272 /* Get dynamic information about call target only if we
23273 have no static information: we cannot generate both
23274 DW_AT_call_origin and DW_AT_call_target
23275 attributes. */
23276 if (ca_loc->symbol_ref == NULL_RTX)
23277 {
23278 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23279 {
23280 tloc = XEXP (XEXP (arg, 0), 1);
23281 continue;
23282 }
23283 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23284 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23285 {
23286 tlocc = XEXP (XEXP (arg, 0), 1);
23287 continue;
23288 }
23289 }
23290 reg = NULL;
23291 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23292 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23293 VAR_INIT_STATUS_INITIALIZED);
23294 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23295 {
23296 rtx mem = XEXP (XEXP (arg, 0), 0);
23297 reg = mem_loc_descriptor (XEXP (mem, 0),
23298 get_address_mode (mem),
23299 GET_MODE (mem),
23300 VAR_INIT_STATUS_INITIALIZED);
23301 }
23302 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23303 == DEBUG_PARAMETER_REF)
23304 {
23305 tree tdecl
23306 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23307 tdie = lookup_decl_die (tdecl);
23308 if (tdie == NULL)
23309 continue;
23310 }
23311 else
23312 continue;
23313 if (reg == NULL
23314 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23315 != DEBUG_PARAMETER_REF)
23316 continue;
23317 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23318 VOIDmode,
23319 VAR_INIT_STATUS_INITIALIZED);
23320 if (val == NULL)
23321 continue;
23322 if (die == NULL)
23323 die = gen_call_site_die (decl, subr_die, ca_loc);
23324 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23325 NULL_TREE);
23326 if (reg != NULL)
23327 add_AT_loc (cdie, DW_AT_location, reg);
23328 else if (tdie != NULL)
23329 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23330 tdie);
23331 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23332 if (next_arg != XEXP (arg, 1))
23333 {
23334 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23335 if (mode == VOIDmode)
23336 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23337 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23338 0), 1),
23339 mode, VOIDmode,
23340 VAR_INIT_STATUS_INITIALIZED);
23341 if (val != NULL)
23342 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23343 val);
23344 }
23345 }
23346 if (die == NULL
23347 && (ca_loc->symbol_ref || tloc))
23348 die = gen_call_site_die (decl, subr_die, ca_loc);
23349 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23350 {
23351 dw_loc_descr_ref tval = NULL;
23352
23353 if (tloc != NULL_RTX)
23354 tval = mem_loc_descriptor (tloc,
23355 GET_MODE (tloc) == VOIDmode
23356 ? Pmode : GET_MODE (tloc),
23357 VOIDmode,
23358 VAR_INIT_STATUS_INITIALIZED);
23359 if (tval)
23360 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23361 else if (tlocc != NULL_RTX)
23362 {
23363 tval = mem_loc_descriptor (tlocc,
23364 GET_MODE (tlocc) == VOIDmode
23365 ? Pmode : GET_MODE (tlocc),
23366 VOIDmode,
23367 VAR_INIT_STATUS_INITIALIZED);
23368 if (tval)
23369 add_AT_loc (die,
23370 dwarf_AT (DW_AT_call_target_clobbered),
23371 tval);
23372 }
23373 }
23374 if (die != NULL)
23375 {
23376 call_site_note_count++;
23377 if (ca_loc->tail_call_p)
23378 tail_call_site_note_count++;
23379 }
23380 }
23381 }
23382 call_arg_locations = NULL;
23383 call_arg_loc_last = NULL;
23384 if (tail_call_site_count >= 0
23385 && tail_call_site_count == tail_call_site_note_count
23386 && (!dwarf_strict || dwarf_version >= 5))
23387 {
23388 if (call_site_count >= 0
23389 && call_site_count == call_site_note_count)
23390 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23391 else
23392 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23393 }
23394 call_site_count = -1;
23395 tail_call_site_count = -1;
23396 }
23397
23398    /* Mark used types after we have created DIEs for the function's scopes.  */
23399 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23400 }
23401
23402 /* Returns a hash value for X (which really is a die_struct). */
23403
23404 hashval_t
23405 block_die_hasher::hash (die_struct *d)
23406 {
23407 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23408 }
23409
23410  /* Return true if the decl_id and die_parent of die_struct X are the same
23411     as the decl_id and die_parent of die_struct Y.  */
23412
23413 bool
23414 block_die_hasher::equal (die_struct *x, die_struct *y)
23415 {
23416 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23417 }
23418
23419 /* Hold information about markers for inlined entry points. */
23420 struct GTY ((for_user)) inline_entry_data
23421 {
23422 /* The block that's the inlined_function_outer_scope for an inlined
23423 function. */
23424 tree block;
23425
23426 /* The label at the inlined entry point. */
23427 const char *label_pfx;
23428 unsigned int label_num;
23429
23430 /* The view number to be used as the inlined entry point. */
23431 var_loc_view view;
23432 };
23433
23434 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23435 {
23436 typedef tree compare_type;
23437 static inline hashval_t hash (const inline_entry_data *);
23438 static inline bool equal (const inline_entry_data *, const_tree);
23439 };
23440
23441 /* Hash table routines for inline_entry_data. */
23442
23443 inline hashval_t
23444 inline_entry_data_hasher::hash (const inline_entry_data *data)
23445 {
23446 return htab_hash_pointer (data->block);
23447 }
23448
23449 inline bool
23450 inline_entry_data_hasher::equal (const inline_entry_data *data,
23451 const_tree block)
23452 {
23453 return data->block == block;
23454 }
23455
23456 /* Inlined entry points pending DIE creation in this compilation unit. */
23457
23458 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23459
23460
23461 /* Return TRUE if DECL, which may have been previously generated as
23462 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23463 true if decl (or its origin) is either an extern declaration or a
23464 class/namespace scoped declaration.
23465
23466 The declare_in_namespace support causes us to get two DIEs for one
23467 variable, both of which are declarations. We want to avoid
23468 considering one to be a specification, so we must test for
23469 DECLARATION and DW_AT_declaration. */
23470 static inline bool
23471 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23472 {
23473 return (old_die && TREE_STATIC (decl) && !declaration
23474 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23475 }
23476
23477 /* Return true if DECL is a local static. */
23478
23479 static inline bool
23480 local_function_static (tree decl)
23481 {
23482 gcc_assert (VAR_P (decl));
23483 return TREE_STATIC (decl)
23484 && DECL_CONTEXT (decl)
23485 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23486 }
23487
23488 /* Generate a DIE to represent a declared data object.
23489 Either DECL or ORIGIN must be non-null. */
23490
23491 static void
23492 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23493 {
23494 HOST_WIDE_INT off = 0;
23495 tree com_decl;
23496 tree decl_or_origin = decl ? decl : origin;
23497 tree ultimate_origin;
23498 dw_die_ref var_die;
23499 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23500 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23501 || class_or_namespace_scope_p (context_die));
23502 bool specialization_p = false;
23503 bool no_linkage_name = false;
23504
23505    /* While C++ inline static data members have definitions inside the
23506 class, force the first DIE to be a declaration, then let gen_member_die
23507 reparent it to the class context and call gen_variable_die again
23508 to create the outside of the class DIE for the definition. */
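  /* For example (an illustrative sketch):

         struct S { static inline int x = 42; };

     Even though S::x is defined in-class, its first DIE is forced to be
     a declaration under S; the definition DIE outside the class is
     created by the later call described above.  */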
23509 if (!declaration
23510 && old_die == NULL
23511 && decl
23512 && DECL_CONTEXT (decl)
23513 && TYPE_P (DECL_CONTEXT (decl))
23514 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23515 {
23516 declaration = true;
23517 if (dwarf_version < 5)
23518 no_linkage_name = true;
23519 }
23520
23521 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23522 if (decl || ultimate_origin)
23523 origin = ultimate_origin;
23524 com_decl = fortran_common (decl_or_origin, &off);
23525
23526    /* A symbol in common gets emitted as a child of the common block, in the
23527       form of a data member.  */
23528 if (com_decl)
23529 {
23530 dw_die_ref com_die;
23531 dw_loc_list_ref loc = NULL;
23532 die_node com_die_arg;
23533
23534 var_die = lookup_decl_die (decl_or_origin);
23535 if (var_die)
23536 {
23537 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23538 {
23539 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23540 if (loc)
23541 {
23542 if (off)
23543 {
23544 /* Optimize the common case. */
23545 if (single_element_loc_list_p (loc)
23546 && loc->expr->dw_loc_opc == DW_OP_addr
23547 && loc->expr->dw_loc_next == NULL
23548 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23549 == SYMBOL_REF)
23550 {
23551 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23552 loc->expr->dw_loc_oprnd1.v.val_addr
23553                         = plus_constant (GET_MODE (x), x, off);
23554 }
23555 else
23556 loc_list_plus_const (loc, off);
23557 }
23558 add_AT_location_description (var_die, DW_AT_location, loc);
23559 remove_AT (var_die, DW_AT_declaration);
23560 }
23561 }
23562 return;
23563 }
23564
23565 if (common_block_die_table == NULL)
23566 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23567
23568 com_die_arg.decl_id = DECL_UID (com_decl);
23569 com_die_arg.die_parent = context_die;
23570 com_die = common_block_die_table->find (&com_die_arg);
23571 if (! early_dwarf)
23572 loc = loc_list_from_tree (com_decl, 2, NULL);
23573 if (com_die == NULL)
23574 {
23575 const char *cnam
23576 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23577 die_node **slot;
23578
23579 com_die = new_die (DW_TAG_common_block, context_die, decl);
23580 add_name_and_src_coords_attributes (com_die, com_decl);
23581 if (loc)
23582 {
23583 add_AT_location_description (com_die, DW_AT_location, loc);
23584 /* Avoid sharing the same loc descriptor between
23585 DW_TAG_common_block and DW_TAG_variable. */
23586 loc = loc_list_from_tree (com_decl, 2, NULL);
23587 }
23588 else if (DECL_EXTERNAL (decl_or_origin))
23589 add_AT_flag (com_die, DW_AT_declaration, 1);
23590 if (want_pubnames ())
23591 add_pubname_string (cnam, com_die); /* ??? needed? */
23592 com_die->decl_id = DECL_UID (com_decl);
23593 slot = common_block_die_table->find_slot (com_die, INSERT);
23594 *slot = com_die;
23595 }
23596 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23597 {
23598 add_AT_location_description (com_die, DW_AT_location, loc);
23599 loc = loc_list_from_tree (com_decl, 2, NULL);
23600 remove_AT (com_die, DW_AT_declaration);
23601 }
23602 var_die = new_die (DW_TAG_variable, com_die, decl);
23603 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23604 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23605 decl_quals (decl_or_origin), false,
23606 context_die);
23607 add_alignment_attribute (var_die, decl);
23608 add_AT_flag (var_die, DW_AT_external, 1);
23609 if (loc)
23610 {
23611 if (off)
23612 {
23613 /* Optimize the common case. */
23614 if (single_element_loc_list_p (loc)
23615 && loc->expr->dw_loc_opc == DW_OP_addr
23616 && loc->expr->dw_loc_next == NULL
23617 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23618 {
23619 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23620 loc->expr->dw_loc_oprnd1.v.val_addr
23621 = plus_constant (GET_MODE (x), x, off);
23622 }
23623 else
23624 loc_list_plus_const (loc, off);
23625 }
23626 add_AT_location_description (var_die, DW_AT_location, loc);
23627 }
23628 else if (DECL_EXTERNAL (decl_or_origin))
23629 add_AT_flag (var_die, DW_AT_declaration, 1);
23630 if (decl)
23631 equate_decl_number_to_die (decl, var_die);
23632 return;
23633 }
23634
23635 if (old_die)
23636 {
23637 if (declaration)
23638 {
23639          /* A declaration that has been previously dumped needs no
23640 further annotations, since it doesn't need location on
23641 the second pass. */
23642 return;
23643 }
23644 else if (decl_will_get_specification_p (old_die, decl, declaration)
23645 && !get_AT (old_die, DW_AT_specification))
23646 {
23647 /* Fall-thru so we can make a new variable die along with a
23648 DW_AT_specification. */
23649 }
23650 else if (origin && old_die->die_parent != context_die)
23651 {
23652 /* If we will be creating an inlined instance, we need a
23653 new DIE that will get annotated with
23654 DW_AT_abstract_origin. */
23655 gcc_assert (!DECL_ABSTRACT_P (decl));
23656 }
23657 else
23658 {
23659 /* If a DIE was dumped early, it still needs location info.
23660 Skip to where we fill the location bits. */
23661 var_die = old_die;
23662
23663 /* ??? In LTRANS we cannot annotate early created variably
23664 modified type DIEs without copying them and adjusting all
23665             references to them.  Thus we dump them again.  Also add a
23666             reference to them, but beware of a -g0 compile and -g link,
23667             in which case the reference will already be present.  */
23668 tree type = TREE_TYPE (decl_or_origin);
23669 if (in_lto_p
23670 && ! get_AT (var_die, DW_AT_type)
23671 && variably_modified_type_p
23672 (type, decl_function_context (decl_or_origin)))
23673 {
23674 if (decl_by_reference_p (decl_or_origin))
23675 add_type_attribute (var_die, TREE_TYPE (type),
23676 TYPE_UNQUALIFIED, false, context_die);
23677 else
23678 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23679 false, context_die);
23680 }
23681
23682 goto gen_variable_die_location;
23683 }
23684 }
23685
23686 /* For static data members, the declaration in the class is supposed
23687     to have the DW_TAG_member tag in DWARF 3 and 4, and for compatibility
23688     we emit it in DWARF 2 as well; the specification should still be DW_TAG_variable
23689 referencing the DW_TAG_member DIE. */
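  /* For example (an illustrative sketch):

         struct S { static int m; };
         int S::m;

     With dwarf_version < 5, the in-class declaration of S::m becomes a
     DW_TAG_member child of S, while the out-of-class definition becomes
     a DW_TAG_variable whose DW_AT_specification refers back to that
     DW_TAG_member DIE.  */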
23690 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23691 var_die = new_die (DW_TAG_member, context_die, decl);
23692 else
23693 var_die = new_die (DW_TAG_variable, context_die, decl);
23694
23695 if (origin != NULL)
23696 add_abstract_origin_attribute (var_die, origin);
23697
23698 /* Loop unrolling can create multiple blocks that refer to the same
23699 static variable, so we must test for the DW_AT_declaration flag.
23700
23701 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23702 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23703 sharing them.
23704
23705 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23706 else if (decl_will_get_specification_p (old_die, decl, declaration))
23707 {
23708 /* This is a definition of a C++ class level static. */
23709 add_AT_specification (var_die, old_die);
23710 specialization_p = true;
23711 if (DECL_NAME (decl))
23712 {
23713 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23714 struct dwarf_file_data * file_index = lookup_filename (s.file);
23715
23716 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23717 add_AT_file (var_die, DW_AT_decl_file, file_index);
23718
23719 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23720 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23721
23722 if (debug_column_info
23723 && s.column
23724 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23725 != (unsigned) s.column))
23726 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23727
23728 if (old_die->die_tag == DW_TAG_member)
23729 add_linkage_name (var_die, decl);
23730 }
23731 }
23732 else
23733 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23734
23735 if ((origin == NULL && !specialization_p)
23736 || (origin != NULL
23737 && !DECL_ABSTRACT_P (decl_or_origin)
23738 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23739 decl_function_context
23740 (decl_or_origin))))
23741 {
23742 tree type = TREE_TYPE (decl_or_origin);
23743
23744 if (decl_by_reference_p (decl_or_origin))
23745 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23746 context_die);
23747 else
23748 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23749 context_die);
23750 }
23751
23752 if (origin == NULL && !specialization_p)
23753 {
23754 if (TREE_PUBLIC (decl))
23755 add_AT_flag (var_die, DW_AT_external, 1);
23756
23757 if (DECL_ARTIFICIAL (decl))
23758 add_AT_flag (var_die, DW_AT_artificial, 1);
23759
23760 add_alignment_attribute (var_die, decl);
23761
23762 add_accessibility_attribute (var_die, decl);
23763 }
23764
23765 if (declaration)
23766 add_AT_flag (var_die, DW_AT_declaration, 1);
23767
23768 if (decl && (DECL_ABSTRACT_P (decl)
23769 || !old_die || is_declaration_die (old_die)))
23770 equate_decl_number_to_die (decl, var_die);
23771
23772 gen_variable_die_location:
23773 if (! declaration
23774 && (! DECL_ABSTRACT_P (decl_or_origin)
23775 /* Local static vars are shared between all clones/inlines,
23776 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23777 already set. */
23778 || (VAR_P (decl_or_origin)
23779 && TREE_STATIC (decl_or_origin)
23780 && DECL_RTL_SET_P (decl_or_origin))))
23781 {
23782 if (early_dwarf)
23783 add_pubname (decl_or_origin, var_die);
23784 else
23785 add_location_or_const_value_attribute (var_die, decl_or_origin,
23786 decl == NULL);
23787 }
23788 else
23789 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23790
23791 if ((dwarf_version >= 4 || !dwarf_strict)
23792 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23793 DW_AT_const_expr) == 1
23794 && !get_AT (var_die, DW_AT_const_expr)
23795 && !specialization_p)
23796 add_AT_flag (var_die, DW_AT_const_expr, 1);
23797
23798 if (!dwarf_strict)
23799 {
23800 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23801 DW_AT_inline);
23802 if (inl != -1
23803 && !get_AT (var_die, DW_AT_inline)
23804 && !specialization_p)
23805 add_AT_unsigned (var_die, DW_AT_inline, inl);
23806 }
23807 }
23808
23809 /* Generate a DIE to represent a named constant. */
23810
23811 static void
23812 gen_const_die (tree decl, dw_die_ref context_die)
23813 {
23814 dw_die_ref const_die;
23815 tree type = TREE_TYPE (decl);
23816
23817 const_die = lookup_decl_die (decl);
23818 if (const_die)
23819 return;
23820
23821 const_die = new_die (DW_TAG_constant, context_die, decl);
23822 equate_decl_number_to_die (decl, const_die);
23823 add_name_and_src_coords_attributes (const_die, decl);
23824 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23825 if (TREE_PUBLIC (decl))
23826 add_AT_flag (const_die, DW_AT_external, 1);
23827 if (DECL_ARTIFICIAL (decl))
23828 add_AT_flag (const_die, DW_AT_artificial, 1);
23829 tree_add_const_value_attribute_for_decl (const_die, decl);
23830 }
23831
23832 /* Generate a DIE to represent a label identifier. */
23833
23834 static void
23835 gen_label_die (tree decl, dw_die_ref context_die)
23836 {
23837 tree origin = decl_ultimate_origin (decl);
23838 dw_die_ref lbl_die = lookup_decl_die (decl);
23839 rtx insn;
23840 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23841
23842 if (!lbl_die)
23843 {
23844 lbl_die = new_die (DW_TAG_label, context_die, decl);
23845 equate_decl_number_to_die (decl, lbl_die);
23846
23847 if (origin != NULL)
23848 add_abstract_origin_attribute (lbl_die, origin);
23849 else
23850 add_name_and_src_coords_attributes (lbl_die, decl);
23851 }
23852
23853 if (DECL_ABSTRACT_P (decl))
23854 equate_decl_number_to_die (decl, lbl_die);
23855 else if (! early_dwarf)
23856 {
23857 insn = DECL_RTL_IF_SET (decl);
23858
23859        /* Deleted labels are programmer-specified labels which have been
23860 eliminated because of various optimizations. We still emit them
23861 here so that it is possible to put breakpoints on them. */
23862 if (insn
23863 && (LABEL_P (insn)
23864 || ((NOTE_P (insn)
23865 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23866 {
23867 /* When optimization is enabled (via -O) some parts of the compiler
23868 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23869 represent source-level labels which were explicitly declared by
23870 the user. This really shouldn't be happening though, so catch
23871 it if it ever does happen. */
23872 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23873
23874 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23875 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23876 }
23877 else if (insn
23878 && NOTE_P (insn)
23879 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23880 && CODE_LABEL_NUMBER (insn) != -1)
23881 {
23882 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23883 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23884 }
23885 }
23886 }
23887
23888 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23889 attributes to the DIE for a block STMT, to describe where the inlined
23890 function was called from. This is similar to add_src_coords_attributes. */
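/* For example (an illustrative sketch; the names are made up):

       inline int sq (int x) { return x * x; }
       int g (int a) { return sq (a); }

   If SQ is inlined into G, the DW_TAG_inlined_subroutine DIE for the
   inlined body gets DW_AT_call_file and DW_AT_call_line (and, when
   column info is enabled, DW_AT_call_column) describing the call site
   in G.  */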
23891
23892 static inline void
23893 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23894 {
23895 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23896
23897 if (dwarf_version >= 3 || !dwarf_strict)
23898 {
23899 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23900 add_AT_unsigned (die, DW_AT_call_line, s.line);
23901 if (debug_column_info && s.column)
23902 add_AT_unsigned (die, DW_AT_call_column, s.column);
23903 }
23904 }
23905
23906
23907 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23908 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23909
23910 static inline void
23911 add_high_low_attributes (tree stmt, dw_die_ref die)
23912 {
23913 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23914
23915 if (inline_entry_data **iedp
23916 = !inline_entry_data_table ? NULL
23917 : inline_entry_data_table->find_slot_with_hash (stmt,
23918 htab_hash_pointer (stmt),
23919 NO_INSERT))
23920 {
23921 inline_entry_data *ied = *iedp;
23922 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23923 gcc_assert (debug_inline_points);
23924 gcc_assert (inlined_function_outer_scope_p (stmt));
23925
23926 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23927 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23928
23929 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23930 && !dwarf_strict)
23931 {
23932 if (!output_asm_line_debug_info ())
23933 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23934 else
23935 {
23936 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23937 /* FIXME: this will resolve to a small number. Could we
23938 possibly emit smaller data? Ideally we'd emit a
23939 uleb128, but that would make the size of DIEs
23940 impossible for the compiler to compute, since it's
23941 the assembler that computes the value of the view
23942 label in this case. Ideally, we'd have a single form
23943 encompassing both the address and the view, and
23944 indirecting them through a table might make things
23945 easier, but even that would be more wasteful,
23946 space-wise, than what we have now. */
23947 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23948 }
23949 }
23950
23951 inline_entry_data_table->clear_slot (iedp);
23952 }
23953
23954 if (BLOCK_FRAGMENT_CHAIN (stmt)
23955 && (dwarf_version >= 3 || !dwarf_strict))
23956 {
23957 tree chain, superblock = NULL_TREE;
23958 dw_die_ref pdie;
23959 dw_attr_node *attr = NULL;
23960
23961 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23962 {
23963 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23964 BLOCK_NUMBER (stmt));
23965 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23966 }
23967
23968 /* Optimize duplicate .debug_ranges lists or even tails of
23969          lists.  If this BLOCK has the same ranges as its supercontext,
23970          look up the DW_AT_ranges attribute in the supercontext (and
23971          recursively so), verify that the ranges_table contains the
23972          right values and use it instead of adding a new .debug_ranges entry.  */
23973 for (chain = stmt, pdie = die;
23974 BLOCK_SAME_RANGE (chain);
23975 chain = BLOCK_SUPERCONTEXT (chain))
23976 {
23977 dw_attr_node *new_attr;
23978
23979 pdie = pdie->die_parent;
23980 if (pdie == NULL)
23981 break;
23982 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23983 break;
23984 new_attr = get_AT (pdie, DW_AT_ranges);
23985 if (new_attr == NULL
23986 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23987 break;
23988 attr = new_attr;
23989 superblock = BLOCK_SUPERCONTEXT (chain);
23990 }
23991 if (attr != NULL
23992 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
23993 == BLOCK_NUMBER (superblock))
23994 && BLOCK_FRAGMENT_CHAIN (superblock))
23995 {
23996 unsigned long off = attr->dw_attr_val.v.val_offset;
23997 unsigned long supercnt = 0, thiscnt = 0;
23998 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
23999 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24000 {
24001 ++supercnt;
24002 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24003 == BLOCK_NUMBER (chain));
24004 }
24005 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24006 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24007 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24008 ++thiscnt;
24009 gcc_assert (supercnt >= thiscnt);
24010 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24011 false);
24012 note_rnglist_head (off + supercnt - thiscnt);
24013 return;
24014 }
24015
24016 unsigned int offset = add_ranges (stmt, true);
24017 add_AT_range_list (die, DW_AT_ranges, offset, false);
24018 note_rnglist_head (offset);
24019
24020 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24021 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24022 do
24023 {
24024 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24025 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24026 chain = BLOCK_FRAGMENT_CHAIN (chain);
24027 }
24028 while (chain);
24029 add_ranges (NULL);
24030 }
24031 else
24032 {
24033 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24034 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24035 BLOCK_NUMBER (stmt));
24036 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24037 BLOCK_NUMBER (stmt));
24038 add_AT_low_high_pc (die, label, label_high, false);
24039 }
24040 }
24041
24042 /* Generate a DIE for a lexical block. */
24043
24044 static void
24045 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24046 {
24047 dw_die_ref old_die = BLOCK_DIE (stmt);
24048 dw_die_ref stmt_die = NULL;
24049 if (!old_die)
24050 {
24051 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24052 BLOCK_DIE (stmt) = stmt_die;
24053 }
24054
24055 if (BLOCK_ABSTRACT (stmt))
24056 {
24057 if (old_die)
24058 {
24059 /* This must have been generated early and it won't even
24060 need location information since it's a DW_AT_inline
24061 function. */
24062 if (flag_checking)
24063 for (dw_die_ref c = context_die; c; c = c->die_parent)
24064 if (c->die_tag == DW_TAG_inlined_subroutine
24065 || c->die_tag == DW_TAG_subprogram)
24066 {
24067 gcc_assert (get_AT (c, DW_AT_inline));
24068 break;
24069 }
24070 return;
24071 }
24072 }
24073 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24074 {
24075 /* If this is an inlined instance, create a new lexical die for
24076 anything below to attach DW_AT_abstract_origin to. */
24077 if (old_die)
24078 {
24079 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24080 BLOCK_DIE (stmt) = stmt_die;
24081 old_die = NULL;
24082 }
24083
24084 tree origin = block_ultimate_origin (stmt);
24085 if (origin != NULL_TREE && origin != stmt)
24086 add_abstract_origin_attribute (stmt_die, origin);
24087 }
24088
24089 if (old_die)
24090 stmt_die = old_die;
24091
24092 /* A non-abstract block whose blocks have already been reordered
24093 should have the instruction range for this block. If so, set the
24094 high/low attributes. */
24095 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24096 {
24097 gcc_assert (stmt_die);
24098 add_high_low_attributes (stmt, stmt_die);
24099 }
24100
24101 decls_for_scope (stmt, stmt_die);
24102 }
24103
24104 /* Generate a DIE for an inlined subprogram. */
24105
24106 static void
24107 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24108 {
24109 tree decl;
24110
24111 /* The instance of the function that is effectively being inlined shall not
24112 be abstract. */
24113 gcc_assert (! BLOCK_ABSTRACT (stmt));
24114
24115 decl = block_ultimate_origin (stmt);
24116
24117 /* Make sure any inlined functions are known to be inlineable. */
24118 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24119 || cgraph_function_possibly_inlined_p (decl));
24120
24121 if (! BLOCK_ABSTRACT (stmt))
24122 {
24123 dw_die_ref subr_die
24124 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24125
24126 if (call_arg_locations || debug_inline_points)
24127 BLOCK_DIE (stmt) = subr_die;
24128 add_abstract_origin_attribute (subr_die, decl);
24129 if (TREE_ASM_WRITTEN (stmt))
24130 add_high_low_attributes (stmt, subr_die);
24131 add_call_src_coords_attributes (stmt, subr_die);
24132
24133 decls_for_scope (stmt, subr_die);
24134 }
24135 }
24136
24137 /* Generate a DIE for a field in a record or structure. CTX is required: see
24138 the comment for VLR_CONTEXT. */
24139
24140 static void
24141 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24142 {
24143 dw_die_ref decl_die;
24144
24145 if (TREE_TYPE (decl) == error_mark_node)
24146 return;
24147
24148 decl_die = new_die (DW_TAG_member, context_die, decl);
24149 add_name_and_src_coords_attributes (decl_die, decl);
24150 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24151 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24152 context_die);
24153
24154 if (DECL_BIT_FIELD_TYPE (decl))
24155 {
24156 add_byte_size_attribute (decl_die, decl);
24157 add_bit_size_attribute (decl_die, decl);
24158 add_bit_offset_attribute (decl_die, decl, ctx);
24159 }
24160
24161 add_alignment_attribute (decl_die, decl);
24162
24163 /* If we have a variant part offset, then we are supposed to process a member
24164 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24165 trees. */
24166 gcc_assert (ctx->variant_part_offset == NULL_TREE
24167 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24168 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24169 add_data_member_location_attribute (decl_die, decl, ctx);
24170
24171 if (DECL_ARTIFICIAL (decl))
24172 add_AT_flag (decl_die, DW_AT_artificial, 1);
24173
24174 add_accessibility_attribute (decl_die, decl);
24175
24176 /* Equate decl number to die, so that we can look up this decl later on. */
24177 equate_decl_number_to_die (decl, decl_die);
24178 }
24179
24180 /* Generate a DIE for a pointer to a member type. TYPE can be an
24181 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24182 pointer to member function. */
24183
24184 static void
24185 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24186 {
24187 if (lookup_type_die (type))
24188 return;
24189
24190 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24191 scope_die_for (type, context_die), type);
24192
24193 equate_type_number_to_die (type, ptr_die);
24194 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24195 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24196 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24197 context_die);
24198 add_alignment_attribute (ptr_die, type);
24199
24200 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24201 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24202 {
24203 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24204 add_AT_loc (ptr_die, DW_AT_use_location, op);
24205 }
24206 }
24207
24208 static char *producer_string;
24209
24210 /* Return a heap-allocated producer string including command line options
24211 if -grecord-gcc-switches is in effect. */
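/* For example (illustrative; the exact filtering is defined by the
   switch below): compiling with
     gcc -O2 -g -grecord-gcc-switches foo.c
   produces a string along the lines of
     "GNU C17 <version> -O2 -g"
   where driver-internal switches such as -o, -quiet and -dumpbase,
   as well as -grecord-gcc-switches itself, are filtered out. */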
24212
24213 static char *
24214 gen_producer_string (void)
24215 {
24216 size_t j;
24217 auto_vec<const char *> switches;
24218 const char *language_string = lang_hooks.name;
24219 char *producer, *tail;
24220 const char *p;
24221 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24222 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24223
24224 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24225 switch (save_decoded_options[j].opt_index)
24226 {
24227 case OPT_o:
24228 case OPT_d:
24229 case OPT_dumpbase:
24230 case OPT_dumpdir:
24231 case OPT_auxbase:
24232 case OPT_auxbase_strip:
24233 case OPT_quiet:
24234 case OPT_version:
24235 case OPT_v:
24236 case OPT_w:
24237 case OPT_L:
24238 case OPT_D:
24239 case OPT_I:
24240 case OPT_U:
24241 case OPT_SPECIAL_unknown:
24242 case OPT_SPECIAL_ignore:
24243 case OPT_SPECIAL_program_name:
24244 case OPT_SPECIAL_input_file:
24245 case OPT_grecord_gcc_switches:
24246 case OPT__output_pch_:
24247 case OPT_fdiagnostics_show_location_:
24248 case OPT_fdiagnostics_show_option:
24249 case OPT_fdiagnostics_show_caret:
24250 case OPT_fdiagnostics_color_:
24251 case OPT_fverbose_asm:
24252 case OPT____:
24253 case OPT__sysroot_:
24254 case OPT_nostdinc:
24255 case OPT_nostdinc__:
24256 case OPT_fpreprocessed:
24257 case OPT_fltrans_output_list_:
24258 case OPT_fresolution_:
24259 case OPT_fdebug_prefix_map_:
24260 case OPT_fmacro_prefix_map_:
24261 case OPT_ffile_prefix_map_:
24262 case OPT_fcompare_debug:
24263 case OPT_fchecking:
24264 case OPT_fchecking_:
24265 /* Ignore these. */
24266 continue;
24267 default:
24268 if (cl_options[save_decoded_options[j].opt_index].flags
24269 & CL_NO_DWARF_RECORD)
24270 continue;
24271 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24272 == '-');
24273 switch (save_decoded_options[j].canonical_option[0][1])
24274 {
24275 case 'M':
24276 case 'i':
24277 case 'W':
24278 continue;
24279 case 'f':
24280 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24281 "dump", 4) == 0)
24282 continue;
24283 break;
24284 default:
24285 break;
24286 }
24287 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24288 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24289 break;
24290 }
24291
24292 producer = XNEWVEC (char, plen + 1 + len + 1);
24293 tail = producer;
24294 sprintf (tail, "%s %s", language_string, version_string);
24295 tail += plen;
24296
24297 FOR_EACH_VEC_ELT (switches, j, p)
24298 {
24299 len = strlen (p);
24300 *tail = ' ';
24301 memcpy (tail + 1, p, len);
24302 tail += len + 1;
24303 }
24304
24305 *tail = '\0';
24306 return producer;
24307 }
24308
24309 /* Given a C and/or C++ language/version string, return the "highest".
24310 C++ is assumed to be "higher" than C in this case. Used for merging
24311 LTO translation unit languages. */
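/* For instance (follows directly from the checks below):
   highest_c_language ("GNU C++14", "GNU C11") returns "GNU C++14",
   and highest_c_language ("GNU C99", "GNU C89") returns "GNU C99". */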
24312 static const char *
24313 highest_c_language (const char *lang1, const char *lang2)
24314 {
24315 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24316 return "GNU C++17";
24317 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24318 return "GNU C++14";
24319 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24320 return "GNU C++11";
24321 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24322 return "GNU C++98";
24323
24324 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24325 return "GNU C17";
24326 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24327 return "GNU C11";
24328 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24329 return "GNU C99";
24330 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24331 return "GNU C89";
24332
24333 gcc_unreachable ();
24334 }
24335
24336
24337 /* Generate the DIE for the compilation unit. */
24338
24339 static dw_die_ref
24340 gen_compile_unit_die (const char *filename)
24341 {
24342 dw_die_ref die;
24343 const char *language_string = lang_hooks.name;
24344 int language;
24345
24346 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24347
24348 if (filename)
24349 {
24350 add_name_attribute (die, filename);
24351 /* Don't add cwd for <built-in>. */
24352 if (filename[0] != '<')
24353 add_comp_dir_attribute (die);
24354 }
24355
24356 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24357
24358 /* If our producer is LTO, try to figure out a common language to use
24359 from the global list of translation units. */
24360 if (strcmp (language_string, "GNU GIMPLE") == 0)
24361 {
24362 unsigned i;
24363 tree t;
24364 const char *common_lang = NULL;
24365
24366 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24367 {
24368 if (!TRANSLATION_UNIT_LANGUAGE (t))
24369 continue;
24370 if (!common_lang)
24371 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24372 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24373 ;
24374 else if (strncmp (common_lang, "GNU C", 5) == 0
24375 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24376 /* Mixing C and C++ is ok, use C++ in that case. */
24377 common_lang = highest_c_language (common_lang,
24378 TRANSLATION_UNIT_LANGUAGE (t));
24379 else
24380 {
24381 /* Fall back to C. */
24382 common_lang = NULL;
24383 break;
24384 }
24385 }
24386
24387 if (common_lang)
24388 language_string = common_lang;
24389 }
24390
24391 language = DW_LANG_C;
24392 if (strncmp (language_string, "GNU C", 5) == 0
24393 && ISDIGIT (language_string[5]))
24394 {
24395 language = DW_LANG_C89;
24396 if (dwarf_version >= 3 || !dwarf_strict)
24397 {
24398 if (strcmp (language_string, "GNU C89") != 0)
24399 language = DW_LANG_C99;
24400
24401 if (dwarf_version >= 5 /* || !dwarf_strict */)
24402 if (strcmp (language_string, "GNU C11") == 0
24403 || strcmp (language_string, "GNU C17") == 0)
24404 language = DW_LANG_C11;
24405 }
24406 }
24407 else if (strncmp (language_string, "GNU C++", 7) == 0)
24408 {
24409 language = DW_LANG_C_plus_plus;
24410 if (dwarf_version >= 5 /* || !dwarf_strict */)
24411 {
24412 if (strcmp (language_string, "GNU C++11") == 0)
24413 language = DW_LANG_C_plus_plus_11;
24414 else if (strcmp (language_string, "GNU C++14") == 0)
24415 language = DW_LANG_C_plus_plus_14;
24416 else if (strcmp (language_string, "GNU C++17") == 0)
24417 /* For now. */
24418 language = DW_LANG_C_plus_plus_14;
24419 }
24420 }
24421 else if (strcmp (language_string, "GNU F77") == 0)
24422 language = DW_LANG_Fortran77;
24423 else if (dwarf_version >= 3 || !dwarf_strict)
24424 {
24425 if (strcmp (language_string, "GNU Ada") == 0)
24426 language = DW_LANG_Ada95;
24427 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24428 {
24429 language = DW_LANG_Fortran95;
24430 if (dwarf_version >= 5 /* || !dwarf_strict */)
24431 {
24432 if (strcmp (language_string, "GNU Fortran2003") == 0)
24433 language = DW_LANG_Fortran03;
24434 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24435 language = DW_LANG_Fortran08;
24436 }
24437 }
24438 else if (strcmp (language_string, "GNU Objective-C") == 0)
24439 language = DW_LANG_ObjC;
24440 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24441 language = DW_LANG_ObjC_plus_plus;
24442 else if (dwarf_version >= 5 || !dwarf_strict)
24443 {
24444 if (strcmp (language_string, "GNU Go") == 0)
24445 language = DW_LANG_Go;
24446 }
24447 }
24448 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24449 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24450 language = DW_LANG_Fortran90;
24451 /* Likewise for Ada. */
24452 else if (strcmp (language_string, "GNU Ada") == 0)
24453 language = DW_LANG_Ada83;
24454
24455 add_AT_unsigned (die, DW_AT_language, language);
24456
24457 switch (language)
24458 {
24459 case DW_LANG_Fortran77:
24460 case DW_LANG_Fortran90:
24461 case DW_LANG_Fortran95:
24462 case DW_LANG_Fortran03:
24463 case DW_LANG_Fortran08:
24464 /* Fortran has case insensitive identifiers and the front-end
24465 lowercases everything. */
24466 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24467 break;
24468 default:
24469 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24470 break;
24471 }
24472 return die;
24473 }
24474
24475 /* Generate the DIE for a base class. */
24476
24477 static void
24478 gen_inheritance_die (tree binfo, tree access, tree type,
24479 dw_die_ref context_die)
24480 {
24481 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24482 struct vlr_context ctx = { type, NULL };
24483
24484 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24485 context_die);
24486 add_data_member_location_attribute (die, binfo, &ctx);
24487
24488 if (BINFO_VIRTUAL_P (binfo))
24489 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24490
24491 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24492 children, otherwise the default is DW_ACCESS_public. In DWARF2
24493 the default has always been DW_ACCESS_private. */
24494 if (access == access_public_node)
24495 {
24496 if (dwarf_version == 2
24497 || context_die->die_tag == DW_TAG_class_type)
24498 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24499 }
24500 else if (access == access_protected_node)
24501 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24502 else if (dwarf_version > 2
24503 && context_die->die_tag != DW_TAG_class_type)
24504 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24505 }
24506
24507 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24508 structure. */
24509 static bool
24510 is_variant_part (tree decl)
24511 {
24512 return (TREE_CODE (decl) == FIELD_DECL
24513 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24514 }
24515
24516 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24517 return the FIELD_DECL. Return NULL_TREE otherwise. */
24518
24519 static tree
24520 analyze_discr_in_predicate (tree operand, tree struct_type)
24521 {
24522 bool continue_stripping = true;
24523 while (continue_stripping)
24524 switch (TREE_CODE (operand))
24525 {
24526 CASE_CONVERT:
24527 operand = TREE_OPERAND (operand, 0);
24528 break;
24529 default:
24530 continue_stripping = false;
24531 break;
24532 }
24533
24534 /* Match field access to members of struct_type only. */
24535 if (TREE_CODE (operand) == COMPONENT_REF
24536 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24537 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24538 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24539 return TREE_OPERAND (operand, 1);
24540 else
24541 return NULL_TREE;
24542 }
24543
24544 /* Check that SRC is a constant integer that can be represented as a native
24545 integer constant (either signed or unsigned). If so, store it into DEST and
24546 return true. Return false otherwise. */
24547
24548 static bool
24549 get_discr_value (tree src, dw_discr_value *dest)
24550 {
24551 tree discr_type = TREE_TYPE (src);
24552
24553 if (lang_hooks.types.get_debug_type)
24554 {
24555 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24556 if (debug_type != NULL)
24557 discr_type = debug_type;
24558 }
24559
24560 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24561 return false;
24562
24563 /* Signedness can vary between the original type and the debug type. This
24564 can happen for character types in Ada for instance: the character type
24565 used for code generation can be signed, to be compatible with the C one,
24566 but from a debugger point of view, it must be unsigned. */
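/* E.g. (illustrative): a character whose code generation value is the
   signed value -56 must be presented to the debugger as the unsigned
   value 200. */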
24567 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24568 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24569
24570 if (is_orig_unsigned != is_debug_unsigned)
24571 src = fold_convert (discr_type, src);
24572
24573 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24574 return false;
24575
24576 dest->pos = is_debug_unsigned;
24577 if (is_debug_unsigned)
24578 dest->v.uval = tree_to_uhwi (src);
24579 else
24580 dest->v.sval = tree_to_shwi (src);
24581
24582 return true;
24583 }
24584
24585 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24586 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24587 store NULL_TREE in DISCR_DECL. Otherwise:
24588
24589 - store the discriminant field in STRUCT_TYPE that controls the variant
24590 part to *DISCR_DECL
24591
24592 - put in *DISCR_LISTS_P an array where for each variant, the item
24593 represents the corresponding matching list of discriminant values.
24594
24595 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24596 the above array.
24597
24598 Note that when the array is allocated (i.e. when the analysis is
24599 successful), it is up to the caller to free the array. */
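/* Illustration (an assumption about the typical front-end lowering,
   Ada shown; nothing here is required by this function beyond the
   predicate shapes it matches below):

     type Rec (K : Integer) is record
        case K is
           when 1 .. 4 => A : Integer;
           when 7 | 9  => B : Boolean;
           when others => null;
        end case;
     end record;

   typically reaches us as a QUAL_UNION_TYPE whose variants carry
   DECL_QUALIFIER predicates of the form
     <placeholder>.K >= 1 && <placeholder>.K <= 4
     <placeholder>.K == 7 || <placeholder>.K == 9
   with boolean_true_node for the default ("others") variant. */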
24600
24601 static void
24602 analyze_variants_discr (tree variant_part_decl,
24603 tree struct_type,
24604 tree *discr_decl,
24605 dw_discr_list_ref **discr_lists_p,
24606 unsigned *discr_lists_length)
24607 {
24608 tree variant_part_type = TREE_TYPE (variant_part_decl);
24609 tree variant;
24610 dw_discr_list_ref *discr_lists;
24611 unsigned i;
24612
24613 /* Compute how many variants there are in this variant part. */
24614 *discr_lists_length = 0;
24615 for (variant = TYPE_FIELDS (variant_part_type);
24616 variant != NULL_TREE;
24617 variant = DECL_CHAIN (variant))
24618 ++*discr_lists_length;
24619
24620 *discr_decl = NULL_TREE;
24621 *discr_lists_p
24622 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24623 sizeof (**discr_lists_p));
24624 discr_lists = *discr_lists_p;
24625
24626 /* And then analyze all variants to extract discriminant information for all
24627 of them. This analysis is conservative: as soon as we detect something we
24628 do not support, abort everything and pretend we found nothing. */
24629 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24630 variant != NULL_TREE;
24631 variant = DECL_CHAIN (variant), ++i)
24632 {
24633 tree match_expr = DECL_QUALIFIER (variant);
24634
24635 /* Now, try to analyze the predicate and deduce a discriminant for
24636 it. */
24637 if (match_expr == boolean_true_node)
24638 /* Typically happens for the default variant: it matches all cases that
24639 previous variants rejected. Don't output any matching value for
24640 this one. */
24641 continue;
24642
24643 /* The following loop tries to iterate over each discriminant
24644 possibility: single values or ranges. */
24645 while (match_expr != NULL_TREE)
24646 {
24647 tree next_round_match_expr;
24648 tree candidate_discr = NULL_TREE;
24649 dw_discr_list_ref new_node = NULL;
24650
24651 /* Possibilities are matched one after the other by nested
24652 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24653 continue with the rest at next iteration. */
24654 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24655 {
24656 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24657 match_expr = TREE_OPERAND (match_expr, 1);
24658 }
24659 else
24660 next_round_match_expr = NULL_TREE;
24661
24662 if (match_expr == boolean_false_node)
24663 /* This sub-expression matches nothing: just wait for the next
24664 one. */
24665 ;
24666
24667 else if (TREE_CODE (match_expr) == EQ_EXPR)
24668 {
24669 /* We are matching: <discr_field> == <integer_cst>
24670 This sub-expression matches a single value. */
24671 tree integer_cst = TREE_OPERAND (match_expr, 1);
24672
24673 candidate_discr
24674 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24675 struct_type);
24676
24677 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24678 if (!get_discr_value (integer_cst,
24679 &new_node->dw_discr_lower_bound))
24680 goto abort;
24681 new_node->dw_discr_range = false;
24682 }
24683
24684 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24685 {
24686 /* We are matching:
24687 <discr_field> > <integer_cst>
24688 && <discr_field> < <integer_cst>.
24689 This sub-expression matches the range of values between the
24690 two matched integer constants. Note that comparisons can be
24691 inclusive or exclusive. */
24692 tree candidate_discr_1, candidate_discr_2;
24693 tree lower_cst, upper_cst;
24694 bool lower_cst_included, upper_cst_included;
24695 tree lower_op = TREE_OPERAND (match_expr, 0);
24696 tree upper_op = TREE_OPERAND (match_expr, 1);
24697
24698 /* When the comparison is exclusive, the integer constant is not
24699 the discriminant range bound we are looking for: we will have
24700 to increment or decrement it. */
24701 if (TREE_CODE (lower_op) == GE_EXPR)
24702 lower_cst_included = true;
24703 else if (TREE_CODE (lower_op) == GT_EXPR)
24704 lower_cst_included = false;
24705 else
24706 goto abort;
24707
24708 if (TREE_CODE (upper_op) == LE_EXPR)
24709 upper_cst_included = true;
24710 else if (TREE_CODE (upper_op) == LT_EXPR)
24711 upper_cst_included = false;
24712 else
24713 goto abort;
24714
24715 /* Extract the discriminant from the first operand and check it
24716 is consistent with the same analysis in the second
24717 operand. */
24718 candidate_discr_1
24719 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24720 struct_type);
24721 candidate_discr_2
24722 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24723 struct_type);
24724 if (candidate_discr_1 == candidate_discr_2)
24725 candidate_discr = candidate_discr_1;
24726 else
24727 goto abort;
24728
24729 /* Extract bounds from both. */
24730 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24731 lower_cst = TREE_OPERAND (lower_op, 1);
24732 upper_cst = TREE_OPERAND (upper_op, 1);
24733
24734 if (!lower_cst_included)
24735 lower_cst
24736 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24737 build_int_cst (TREE_TYPE (lower_cst), 1));
24738 if (!upper_cst_included)
24739 upper_cst
24740 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24741 build_int_cst (TREE_TYPE (upper_cst), 1));
24742
24743 if (!get_discr_value (lower_cst,
24744 &new_node->dw_discr_lower_bound)
24745 || !get_discr_value (upper_cst,
24746 &new_node->dw_discr_upper_bound))
24747 goto abort;
24748
24749 new_node->dw_discr_range = true;
24750 }
24751
24752 else
24753 /* Unsupported sub-expression: we cannot determine the set of
24754 matching discriminant values. Abort everything. */
24755 goto abort;
24756
24757 /* If the discriminant info is not consistent with what we saw so
24758 far, consider the analysis failed and abort everything. */
24759 if (candidate_discr == NULL_TREE
24760 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24761 goto abort;
24762 else
24763 *discr_decl = candidate_discr;
24764
24765 if (new_node != NULL)
24766 {
24767 new_node->dw_discr_next = discr_lists[i];
24768 discr_lists[i] = new_node;
24769 }
24770 match_expr = next_round_match_expr;
24771 }
24772 }
24773
24774 /* If we reach this point, we could match everything we were interested
24775 in. */
24776 return;
24777
24778 abort:
24779 /* Clean up all data structures and return no result. */
24780 free (*discr_lists_p);
24781 *discr_lists_p = NULL;
24782 *discr_decl = NULL_TREE;
24783 }
24784
24785 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24786 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24787 under CONTEXT_DIE.
24788
24789 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24790 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24791 this type, which are record types, represent the available variants and each
24792 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24793 values are inferred from these attributes.
24794
24795 In trees, the offsets for the fields inside these sub-records are relative
24796 to the variant part itself, whereas the corresponding DIEs should have
24797 offset attributes that are relative to the embedding record base address.
24798 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24799 must be an expression that computes the offset of the variant part to
24800 describe in DWARF. */
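/* Rough shape of the output (illustrative; the normative layout is
   the DWARF standard's description of variant parts):

     DW_TAG_structure_type
       DW_TAG_member "k"                 <-- the discriminant field
       DW_TAG_variant_part               DW_AT_discr -> DIE of "k"
         DW_TAG_variant                  DW_AT_discr_value or
                                         DW_AT_discr_list
           DW_TAG_member ...             fields of this variant
         DW_TAG_variant                  (no discr attribute: default)
           DW_TAG_member ...  */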
24801
24802 static void
24803 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24804 dw_die_ref context_die)
24805 {
24806 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24807 tree variant_part_offset = vlr_ctx->variant_part_offset;
24808 struct loc_descr_context ctx = {
24809 vlr_ctx->struct_type, /* context_type */
24810 NULL_TREE, /* base_decl */
24811 NULL, /* dpi */
24812 false, /* placeholder_arg */
24813 false /* placeholder_seen */
24814 };
24815
24816 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24817 NULL_TREE if there is no such field. */
24818 tree discr_decl = NULL_TREE;
24819 dw_discr_list_ref *discr_lists;
24820 unsigned discr_lists_length = 0;
24821 unsigned i;
24822
24823 dw_die_ref dwarf_proc_die = NULL;
24824 dw_die_ref variant_part_die
24825 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24826
24827 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24828
24829 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24830 &discr_decl, &discr_lists, &discr_lists_length);
24831
24832 if (discr_decl != NULL_TREE)
24833 {
24834 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24835
24836 if (discr_die)
24837 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24838 else
24839 /* We have no DIE for the discriminant, so just discard all
24840 discriminant information in the output. */
24841 discr_decl = NULL_TREE;
24842 }
24843
24844 /* If the offset for this variant part is more complex than a constant,
24845 create a DWARF procedure for it so that we will not have to generate DWARF
24846 expressions for it for each member. */
24847 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24848 && (dwarf_version >= 3 || !dwarf_strict))
24849 {
24850 const tree dwarf_proc_fndecl
24851 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24852 build_function_type (TREE_TYPE (variant_part_offset),
24853 NULL_TREE));
24854 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24855 const dw_loc_descr_ref dwarf_proc_body
24856 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24857
24858 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24859 dwarf_proc_fndecl, context_die);
24860 if (dwarf_proc_die != NULL)
24861 variant_part_offset = dwarf_proc_call;
24862 }
24863
24864 /* Output DIEs for all variants. */
24865 i = 0;
24866 for (tree variant = TYPE_FIELDS (variant_part_type);
24867 variant != NULL_TREE;
24868 variant = DECL_CHAIN (variant), ++i)
24869 {
24870 tree variant_type = TREE_TYPE (variant);
24871 dw_die_ref variant_die;
24872
24873 /* All variants (i.e. members of a variant part) are supposed to be
24874 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24875 under these records. */
24876 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24877
24878 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24879 equate_decl_number_to_die (variant, variant_die);
24880
24881 /* Output discriminant values this variant matches, if any. */
24882 if (discr_decl == NULL || discr_lists[i] == NULL)
24883 /* Either we have no discriminant information at all, or this is
24884 probably the default variant: as the standard says, don't
24885 output any discriminant value/list attribute. */
24886 ;
24887 else if (discr_lists[i]->dw_discr_next == NULL
24888 && !discr_lists[i]->dw_discr_range)
24889 /* If there is only one accepted value, don't bother outputting a
24890 list. */
24891 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24892 else
24893 add_discr_list (variant_die, discr_lists[i]);
24894
24895 for (tree member = TYPE_FIELDS (variant_type);
24896 member != NULL_TREE;
24897 member = DECL_CHAIN (member))
24898 {
24899 struct vlr_context vlr_sub_ctx = {
24900 vlr_ctx->struct_type, /* struct_type */
24901 NULL /* variant_part_offset */
24902 };
24903 if (is_variant_part (member))
24904 {
24905 /* All offsets for fields inside variant parts are relative to
24906 the top-level embedding RECORD_TYPE's base address. On the
24907 other hand, offsets in GCC's types are relative to the
24908 nested-most variant part. So we have to sum offsets each time
24909 we recurse. */
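/* E.g. (illustrative): a variant part at byte 8 of the record
   containing a nested variant part at byte 4 of its variant
   yields an accumulated offset of 12 for the nested part's
   members. */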
24910
24911 vlr_sub_ctx.variant_part_offset
24912 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24913 variant_part_offset, byte_position (member));
24914 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24915 }
24916 else
24917 {
24918 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24919 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24920 }
24921 }
24922 }
24923
24924 free (discr_lists);
24925 }
24926
24927 /* Generate a DIE for a class member. */
24928
24929 static void
24930 gen_member_die (tree type, dw_die_ref context_die)
24931 {
24932 tree member;
24933 tree binfo = TYPE_BINFO (type);
24934
24935 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24936
24937 /* If this is not an incomplete type, output descriptions of each of its
24938 members. Note that as we output the DIEs necessary to represent the
24939 members of this record or union type, we will also be trying to output
24940 DIEs to represent the *types* of those members. However the `type'
24941 function (above) will specifically avoid generating type DIEs for member
24942 types *within* the list of member DIEs for this (containing) type except
24943 for those types (of members) which are explicitly marked as also being
24944 members of this (containing) type themselves. The g++ front end can
24945 force any given type to be treated as a member of some other (containing)
24946 type by setting the TYPE_CONTEXT of the given (member) type to point to
24947 the TREE node representing the appropriate (containing) type. */
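/* For example (an illustrative C++ case): for
     struct Outer { struct Inner { int i; }; Inner member; };
   the front end sets TYPE_CONTEXT (Inner) == Outer, so Inner's DIE is
   emitted here as a child of Outer's DIE. */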
24948
24949 /* First output info about the base classes. */
24950 if (binfo)
24951 {
24952 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24953 int i;
24954 tree base;
24955
24956 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24957 gen_inheritance_die (base,
24958 (accesses ? (*accesses)[i] : access_public_node),
24959 type,
24960 context_die);
24961 }
24962
24963 /* Now output info about the data members and type members. */
24964 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24965 {
24966 struct vlr_context vlr_ctx = { type, NULL_TREE };
24967 bool static_inline_p
24968 = (TREE_STATIC (member)
24969 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24970 != -1));
24971
24972 /* Ignore clones. */
24973 if (DECL_ABSTRACT_ORIGIN (member))
24974 continue;
24975
24976 /* If we thought we were generating minimal debug info for TYPE
24977 and then changed our minds, some of the member declarations
24978 may have already been defined. Don't define them again, but
24979 do put them in the right order. */
24980
24981 if (dw_die_ref child = lookup_decl_die (member))
24982 {
24983 /* Handle inline static data members, which only have in-class
24984 declarations. */
24985 dw_die_ref ref = NULL;
24986 if (child->die_tag == DW_TAG_variable
24987 && child->die_parent == comp_unit_die ())
24988 {
24989 ref = get_AT_ref (child, DW_AT_specification);
24990 /* For C++17 inline static data members followed by redundant
24991 out of class redeclaration, we might get here with
24992 child being the DIE created for the out of class
24993 redeclaration and with its DW_AT_specification being
24994 the DIE created for in-class definition. We want to
24995 reparent the latter, and don't want to create another
24996 DIE with DW_AT_specification in that case, because
24997 we already have one. */
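/* Illustrative C++17 source for this situation (an assumption about
   typical front-end output, not something checked here):
     struct S { static inline int i = 0; };
     int S::i;   // redundant out-of-class redeclaration
   CHILD is then the DIE for the redeclaration and REF the DIE for
   the in-class definition. */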
24998 if (ref
24999 && static_inline_p
25000 && ref->die_tag == DW_TAG_variable
25001 && ref->die_parent == comp_unit_die ()
25002 && get_AT (ref, DW_AT_specification) == NULL)
25003 {
25004 child = ref;
25005 ref = NULL;
25006 static_inline_p = false;
25007 }
25008 }
25009
25010 if (child->die_tag == DW_TAG_variable
25011 && child->die_parent == comp_unit_die ()
25012 && ref == NULL)
25013 {
25014 reparent_child (child, context_die);
25015 if (dwarf_version < 5)
25016 child->die_tag = DW_TAG_member;
25017 }
25018 else
25019 splice_child_die (context_die, child);
25020 }
25021
25022 /* Do not generate standard DWARF for variant parts if we are generating
25023 the corresponding GNAT encodings: DIEs generated for both would
25024 conflict in our mappings. */
25025 else if (is_variant_part (member)
25026 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25027 {
25028 vlr_ctx.variant_part_offset = byte_position (member);
25029 gen_variant_part (member, &vlr_ctx, context_die);
25030 }
25031 else
25032 {
25033 vlr_ctx.variant_part_offset = NULL_TREE;
25034 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25035 }
25036
25037 /* For C++ inline static data members emit immediately a DW_TAG_variable
25038 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25039 DW_AT_specification. */
25040 if (static_inline_p)
25041 {
25042 int old_extern = DECL_EXTERNAL (member);
25043 DECL_EXTERNAL (member) = 0;
25044 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25045 DECL_EXTERNAL (member) = old_extern;
25046 }
25047 }
25048 }
25049
25050 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25051 is set, we pretend that the type was never defined, so we only get the
25052 member DIEs needed by later specification DIEs. */
25053
25054 static void
25055 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25056 enum debug_info_usage usage)
25057 {
25058 if (TREE_ASM_WRITTEN (type))
25059 {
25060 /* Fill in the bound of variable-length fields in late dwarf if
25061 still incomplete. */
25062 if (!early_dwarf && variably_modified_type_p (type, NULL))
25063 for (tree member = TYPE_FIELDS (type);
25064 member;
25065 member = DECL_CHAIN (member))
25066 fill_variable_array_bounds (TREE_TYPE (member));
25067 return;
25068 }
25069
25070 dw_die_ref type_die = lookup_type_die (type);
25071 dw_die_ref scope_die = 0;
25072 int nested = 0;
25073 int complete = (TYPE_SIZE (type)
25074 && (! TYPE_STUB_DECL (type)
25075 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25076 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25077 complete = complete && should_emit_struct_debug (type, usage);
25078
25079 if (type_die && ! complete)
25080 return;
25081
25082 if (TYPE_CONTEXT (type) != NULL_TREE
25083 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25084 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25085 nested = 1;
25086
25087 scope_die = scope_die_for (type, context_die);
25088
25089 /* Generate child DIEs for template parameters. */
25090 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25091 schedule_generic_params_dies_gen (type);
25092
25093 if (! type_die || (nested && is_cu_die (scope_die)))
25094 /* First occurrence of type or toplevel definition of nested class. */
25095 {
25096 dw_die_ref old_die = type_die;
25097
25098 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25099 ? record_type_tag (type) : DW_TAG_union_type,
25100 scope_die, type);
25101 equate_type_number_to_die (type, type_die);
25102 if (old_die)
25103 add_AT_specification (type_die, old_die);
25104 else
25105 add_name_attribute (type_die, type_tag (type));
25106 }
25107 else
25108 remove_AT (type_die, DW_AT_declaration);
25109
25110 /* If this type has been completed, then give it a byte_size attribute and
25111 then give a list of members. */
25112 if (complete && !ns_decl)
25113 {
25114 /* Prevent infinite recursion in cases where the type of some member of
25115 this type is expressed in terms of this type itself. */
25116 TREE_ASM_WRITTEN (type) = 1;
25117 add_byte_size_attribute (type_die, type);
25118 add_alignment_attribute (type_die, type);
25119 if (TYPE_STUB_DECL (type) != NULL_TREE)
25120 {
25121 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25122 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25123 }
25124
25125 /* If the first reference to this type was as the return type of an
25126 inline function, then it may not have a parent. Fix this now. */
25127 if (type_die->die_parent == NULL)
25128 add_child_die (scope_die, type_die);
25129
25130 gen_member_die (type, type_die);
25131
25132 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25133 if (TYPE_ARTIFICIAL (type))
25134 add_AT_flag (type_die, DW_AT_artificial, 1);
25135
25136 /* GNU extension: Record what type our vtable lives in. */
25137 if (TYPE_VFIELD (type))
25138 {
25139 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25140
25141 gen_type_die (vtype, context_die);
25142 add_AT_die_ref (type_die, DW_AT_containing_type,
25143 lookup_type_die (vtype));
25144 }
25145 }
25146 else
25147 {
25148 add_AT_flag (type_die, DW_AT_declaration, 1);
25149
25150 /* We don't need to do this for function-local types. */
25151 if (TYPE_STUB_DECL (type)
25152 && ! decl_function_context (TYPE_STUB_DECL (type)))
25153 vec_safe_push (incomplete_types, type);
25154 }
25155
25156 if (get_AT (type_die, DW_AT_name))
25157 add_pubtype (type, type_die);
25158 }
25159
25160 /* Generate a DIE for a subroutine _type_. */
25161
25162 static void
25163 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25164 {
25165 tree return_type = TREE_TYPE (type);
25166 dw_die_ref subr_die
25167 = new_die (DW_TAG_subroutine_type,
25168 scope_die_for (type, context_die), type);
25169
25170 equate_type_number_to_die (type, subr_die);
25171 add_prototyped_attribute (subr_die, type);
25172 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25173 context_die);
25174 add_alignment_attribute (subr_die, type);
25175 gen_formal_types_die (type, subr_die);
25176
25177 if (get_AT (subr_die, DW_AT_name))
25178 add_pubtype (type, subr_die);
25179 if ((dwarf_version >= 5 || !dwarf_strict)
25180 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25181 add_AT_flag (subr_die, DW_AT_reference, 1);
25182 if ((dwarf_version >= 5 || !dwarf_strict)
25183 && lang_hooks.types.type_dwarf_attribute (type,
25184 DW_AT_rvalue_reference) != -1)
25185 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25186 }
25187
25188 /* Generate a DIE for a type definition. */
25189
25190 static void
25191 gen_typedef_die (tree decl, dw_die_ref context_die)
25192 {
25193 dw_die_ref type_die;
25194 tree type;
25195
25196 if (TREE_ASM_WRITTEN (decl))
25197 {
25198 if (DECL_ORIGINAL_TYPE (decl))
25199 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25200 return;
25201 }
25202
25203 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25204 checks in process_scope_var and modified_type_die), this should be called
25205 only for original types. */
25206 gcc_assert (decl_ultimate_origin (decl) == NULL
25207 || decl_ultimate_origin (decl) == decl);
25208
25209 TREE_ASM_WRITTEN (decl) = 1;
25210 type_die = new_die (DW_TAG_typedef, context_die, decl);
25211
25212 add_name_and_src_coords_attributes (type_die, decl);
25213 if (DECL_ORIGINAL_TYPE (decl))
25214 {
25215 type = DECL_ORIGINAL_TYPE (decl);
25216 if (type == error_mark_node)
25217 return;
25218
25219 gcc_assert (type != TREE_TYPE (decl));
25220 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25221 }
25222 else
25223 {
25224 type = TREE_TYPE (decl);
25225 if (type == error_mark_node)
25226 return;
25227
25228 if (is_naming_typedef_decl (TYPE_NAME (type)))
25229 {
25230 /* Here, we are in the case of decl being a typedef naming
25231 an anonymous type, e.g.:
25232 typedef struct {...} foo;
25233 In that case TREE_TYPE (decl) is not a typedef variant
25234 type and TYPE_NAME of the anonymous type is set to the
25235 TYPE_DECL of the typedef. This construct is emitted by
25236 the C++ FE.
25237
25238 TYPE is the anonymous struct named by the typedef
25239 DECL. As we need the DW_AT_type attribute of the
25240 DW_TAG_typedef to point to the DIE of TYPE, let's
25241 generate that DIE right away. add_type_attribute
25242 called below will then pick (via lookup_type_die) that
25243 anonymous struct DIE. */
25244 if (!TREE_ASM_WRITTEN (type))
25245 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25246
25247 /* This is a GNU Extension. We are adding a
25248 DW_AT_linkage_name attribute to the DIE of the
25249 anonymous struct TYPE. The value of that attribute
25250 is the name of the typedef decl naming the anonymous
25251 struct. This greatly eases the work of consumers of
25252 this debug info. */
25253 add_linkage_name_raw (lookup_type_die (type), decl);
25254 }
25255 }
25256
25257 add_type_attribute (type_die, type, decl_quals (decl), false,
25258 context_die);
25259
25260 if (is_naming_typedef_decl (decl))
25261 /* We want all subsequent calls to lookup_type_die with
25262 TYPE as argument to yield the DW_TAG_typedef we have just
25263 created. */
25264 equate_type_number_to_die (type, type_die);
25265
25266 add_alignment_attribute (type_die, TREE_TYPE (decl));
25267
25268 add_accessibility_attribute (type_die, decl);
25269
25270 if (DECL_ABSTRACT_P (decl))
25271 equate_decl_number_to_die (decl, type_die);
25272
25273 if (get_AT (type_die, DW_AT_name))
25274 add_pubtype (decl, type_die);
25275 }
25276
25277 /* Generate a DIE for a struct, class, enum or union type. */
25278
25279 static void
25280 gen_tagged_type_die (tree type,
25281 dw_die_ref context_die,
25282 enum debug_info_usage usage)
25283 {
25284 if (type == NULL_TREE
25285 || !is_tagged_type (type))
25286 return;
25287
25288 if (TREE_ASM_WRITTEN (type))
25289 ;
25290 /* If this is a nested type whose containing class hasn't been written
25291 out yet, writing it out will cover this one, too. This does not apply
25292 to instantiations of member class templates; they need to be added to
25293 the containing class as they are generated. FIXME: This hurts the
25294 idea of combining type decls from multiple TUs, since we can't predict
25295 what set of template instantiations we'll get. */
25296 else if (TYPE_CONTEXT (type)
25297 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25298 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25299 {
25300 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25301
25302 if (TREE_ASM_WRITTEN (type))
25303 return;
25304
25305 /* If that failed, attach ourselves to the stub. */
25306 context_die = lookup_type_die (TYPE_CONTEXT (type));
25307 }
25308 else if (TYPE_CONTEXT (type) != NULL_TREE
25309 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25310 {
25311 /* If this type is local to a function that hasn't been written
25312 out yet, use a NULL context for now; it will be fixed up in
25313 decls_for_scope. */
25314 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25315 /* A declaration DIE doesn't count; nested types need to go in the
25316 specification. */
25317 if (context_die && is_declaration_die (context_die))
25318 context_die = NULL;
25319 }
25320 else
25321 context_die = declare_in_namespace (type, context_die);
25322
25323 if (TREE_CODE (type) == ENUMERAL_TYPE)
25324 {
25325 /* This might have been written out by the call to
25326 declare_in_namespace. */
25327 if (!TREE_ASM_WRITTEN (type))
25328 gen_enumeration_type_die (type, context_die);
25329 }
25330 else
25331 gen_struct_or_union_type_die (type, context_die, usage);
25332
25333 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25334 it up if it is ever completed. gen_*_type_die will set it for us
25335 when appropriate. */
25336 }
25337
25338 /* Generate a type description DIE. */
25339
25340 static void
25341 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25342 enum debug_info_usage usage)
25343 {
25344 struct array_descr_info info;
25345
25346 if (type == NULL_TREE || type == error_mark_node)
25347 return;
25348
25349 if (flag_checking && type)
25350 verify_type (type);
25351
25352 if (TYPE_NAME (type) != NULL_TREE
25353 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25354 && is_redundant_typedef (TYPE_NAME (type))
25355 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25356 /* The DECL of this type is a typedef we don't want to emit debug
25357 info for, but we do want debug info for its underlying type.
25358 This can happen, e.g., for the injected-class-name of a C++
25359 type. */
25360 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25361
25362 /* If TYPE is a typedef type variant, let's generate debug info
25363 for the parent typedef which TYPE is a type of. */
25364 if (typedef_variant_p (type))
25365 {
25366 if (TREE_ASM_WRITTEN (type))
25367 return;
25368
25369 tree name = TYPE_NAME (type);
25370 tree origin = decl_ultimate_origin (name);
25371 if (origin != NULL && origin != name)
25372 {
25373 gen_decl_die (origin, NULL, NULL, context_die);
25374 return;
25375 }
25376
25377 /* Prevent broken recursion; we can't hand off to the same type. */
25378 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25379
25380 /* Give typedefs the right scope. */
25381 context_die = scope_die_for (type, context_die);
25382
25383 TREE_ASM_WRITTEN (type) = 1;
25384
25385 gen_decl_die (name, NULL, NULL, context_die);
25386 return;
25387 }
25388
25389 /* If type is an anonymous tagged type named by a typedef, let's
25390 generate debug info for the typedef. */
25391 if (is_naming_typedef_decl (TYPE_NAME (type)))
25392 {
25393 /* Give typedefs the right scope. */
25394 context_die = scope_die_for (type, context_die);
25395
25396 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25397 return;
25398 }
25399
25400 if (lang_hooks.types.get_debug_type)
25401 {
25402 tree debug_type = lang_hooks.types.get_debug_type (type);
25403
25404 if (debug_type != NULL_TREE && debug_type != type)
25405 {
25406 gen_type_die_with_usage (debug_type, context_die, usage);
25407 return;
25408 }
25409 }
25410
25411 /* We are going to output a DIE to represent the unqualified version
25412 of this type (i.e. without any const or volatile qualifiers) so
25413 get the main variant (i.e. the unqualified version) of this type
25414 now. (Vectors and arrays are special because the debugging info is in the
25415 cloned type itself. Similarly function/method types can contain extra
25416 ref-qualification). */
25417 if (TREE_CODE (type) == FUNCTION_TYPE
25418 || TREE_CODE (type) == METHOD_TYPE)
25419 {
25420 /* For function/method types, can't use type_main_variant here,
25421 because that can have different ref-qualifiers for C++,
25422 but try to canonicalize. */
25423 tree main = TYPE_MAIN_VARIANT (type);
25424 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25425 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25426 && check_base_type (t, main)
25427 && check_lang_type (t, type))
25428 {
25429 type = t;
25430 break;
25431 }
25432 }
25433 else if (TREE_CODE (type) != VECTOR_TYPE
25434 && TREE_CODE (type) != ARRAY_TYPE)
25435 type = type_main_variant (type);
25436
25437 /* If this is an array type with hidden descriptor, handle it first. */
25438 if (!TREE_ASM_WRITTEN (type)
25439 && lang_hooks.types.get_array_descr_info)
25440 {
25441 memset (&info, 0, sizeof (info));
25442 if (lang_hooks.types.get_array_descr_info (type, &info))
25443 {
25444 /* Fortran sometimes emits array types with no dimension. */
25445 gcc_assert (info.ndimensions >= 0
25446 && (info.ndimensions
25447 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25448 gen_descr_array_type_die (type, &info, context_die);
25449 TREE_ASM_WRITTEN (type) = 1;
25450 return;
25451 }
25452 }
25453
25454 if (TREE_ASM_WRITTEN (type))
25455 {
25456 /* Variable-length types may be incomplete even if
25457 TREE_ASM_WRITTEN. For such types, fall through to
25458 gen_array_type_die() and possibly fill in
25459 DW_AT_{upper,lower}_bound attributes. */
25460 if ((TREE_CODE (type) != ARRAY_TYPE
25461 && TREE_CODE (type) != RECORD_TYPE
25462 && TREE_CODE (type) != UNION_TYPE
25463 && TREE_CODE (type) != QUAL_UNION_TYPE)
25464 || !variably_modified_type_p (type, NULL))
25465 return;
25466 }
25467
25468 switch (TREE_CODE (type))
25469 {
25470 case ERROR_MARK:
25471 break;
25472
25473 case POINTER_TYPE:
25474 case REFERENCE_TYPE:
25475 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25476 ensures that the gen_type_die recursion will terminate even if the
25477 type is recursive. Recursive types are possible in Ada. */
25478 /* ??? We could perhaps do this for all types before the switch
25479 statement. */
25480 TREE_ASM_WRITTEN (type) = 1;
25481
25482 /* For these types, all that is required is that we output a DIE (or a
25483 set of DIEs) to represent the "basis" type. */
25484 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25485 DINFO_USAGE_IND_USE);
25486 break;
25487
25488 case OFFSET_TYPE:
25489 /* This code is used for C++ pointer-to-data-member types.
25490 Output a description of the relevant class type. */
25491 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25492 DINFO_USAGE_IND_USE);
25493
25494 /* Output a description of the type of the object pointed to. */
25495 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25496 DINFO_USAGE_IND_USE);
25497
25498 /* Now output a DIE to represent this pointer-to-data-member type
25499 itself. */
25500 gen_ptr_to_mbr_type_die (type, context_die);
25501 break;
25502
25503 case FUNCTION_TYPE:
25504 /* Force out return type (in case it wasn't forced out already). */
25505 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25506 DINFO_USAGE_DIR_USE);
25507 gen_subroutine_type_die (type, context_die);
25508 break;
25509
25510 case METHOD_TYPE:
25511 /* Force out return type (in case it wasn't forced out already). */
25512 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25513 DINFO_USAGE_DIR_USE);
25514 gen_subroutine_type_die (type, context_die);
25515 break;
25516
25517 case ARRAY_TYPE:
25518 case VECTOR_TYPE:
25519 gen_array_type_die (type, context_die);
25520 break;
25521
25522 case ENUMERAL_TYPE:
25523 case RECORD_TYPE:
25524 case UNION_TYPE:
25525 case QUAL_UNION_TYPE:
25526 gen_tagged_type_die (type, context_die, usage);
25527 return;
25528
25529 case VOID_TYPE:
25530 case INTEGER_TYPE:
25531 case REAL_TYPE:
25532 case FIXED_POINT_TYPE:
25533 case COMPLEX_TYPE:
25534 case BOOLEAN_TYPE:
25535 /* No DIEs needed for fundamental types. */
25536 break;
25537
25538 case NULLPTR_TYPE:
25539 case LANG_TYPE:
25540 /* Just use DW_TAG_unspecified_type. */
25541 {
25542 dw_die_ref type_die = lookup_type_die (type);
25543 if (type_die == NULL)
25544 {
25545 tree name = TYPE_IDENTIFIER (type);
25546 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25547 type);
25548 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25549 equate_type_number_to_die (type, type_die);
25550 }
25551 }
25552 break;
25553
25554 default:
25555 if (is_cxx_auto (type))
25556 {
25557 tree name = TYPE_IDENTIFIER (type);
25558 dw_die_ref *die = (name == get_identifier ("auto")
25559 ? &auto_die : &decltype_auto_die);
25560 if (!*die)
25561 {
25562 *die = new_die (DW_TAG_unspecified_type,
25563 comp_unit_die (), NULL_TREE);
25564 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25565 }
25566 equate_type_number_to_die (type, *die);
25567 break;
25568 }
25569 gcc_unreachable ();
25570 }
25571
25572 TREE_ASM_WRITTEN (type) = 1;
25573 }
25574
25575 static void
25576 gen_type_die (tree type, dw_die_ref context_die)
25577 {
25578 if (type != error_mark_node)
25579 {
25580 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25581 if (flag_checking)
25582 {
25583 dw_die_ref die = lookup_type_die (type);
25584 if (die)
25585 check_die (die);
25586 }
25587 }
25588 }
25589
25590 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25591 things which are local to the given block. */
25592
25593 static void
25594 gen_block_die (tree stmt, dw_die_ref context_die)
25595 {
25596 int must_output_die = 0;
25597 bool inlined_func;
25598
25599 /* Ignore blocks that are NULL. */
25600 if (stmt == NULL_TREE)
25601 return;
25602
25603 inlined_func = inlined_function_outer_scope_p (stmt);
25604
25605 /* If the block is one fragment of a non-contiguous block, do not
25606 process the variables, since they will have been done by the
25607 origin block. Do process subblocks. */
25608 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25609 {
25610 tree sub;
25611
25612 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25613 gen_block_die (sub, context_die);
25614
25615 return;
25616 }
25617
25618 /* Determine if we need to output any Dwarf DIEs at all to represent this
25619 block. */
25620 if (inlined_func)
25621 /* The outer scopes for inlinings *must* always be represented. We
25622 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25623 must_output_die = 1;
25624 else if (BLOCK_DIE (stmt))
25625 /* If we already have a DIE then it was filled early. Meanwhile
25626 we might have pruned all BLOCK_VARS as optimized out but we
25627 still want to generate high/low PC attributes, so output it. */
25628 must_output_die = 1;
25629 else if (TREE_USED (stmt)
25630 || TREE_ASM_WRITTEN (stmt)
25631 || BLOCK_ABSTRACT (stmt))
25632 {
25633 /* Determine if this block directly contains any "significant"
25634 local declarations which we will need to output DIEs for. */
25635 if (debug_info_level > DINFO_LEVEL_TERSE)
25636 {
25637 /* We are not in terse mode so any local declaration that
25638 is not ignored for debug purposes counts as being a
25639 "significant" one. */
25640 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25641 must_output_die = 1;
25642 else
25643 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25644 if (!DECL_IGNORED_P (var))
25645 {
25646 must_output_die = 1;
25647 break;
25648 }
25649 }
25650 else if (!dwarf2out_ignore_block (stmt))
25651 must_output_die = 1;
25652 }
25653
25654 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25655 DIE for any block which contains no significant local declarations at
25656 all. Rather, in such cases we just call `decls_for_scope' so that any
25657 needed Dwarf info for any sub-blocks will get properly generated. Note
25658 that in terse mode, our definition of what constitutes a "significant"
25659 local declaration gets restricted to include only inlined function
25660 instances and local (nested) function definitions. */
25661 if (must_output_die)
25662 {
25663 if (inlined_func)
25664 {
25665 /* If STMT block is abstract, that means we have been called
25666 indirectly from dwarf2out_abstract_function.
25667 That function rightfully marks the descendant blocks (of
25668 the abstract function it is dealing with) as being abstract,
25669 precisely to prevent us from emitting any
25670 DW_TAG_inlined_subroutine DIE as a descendant
25671 of an abstract function instance. So in that case, we should
25672 not call gen_inlined_subroutine_die.
25673
25674 Later though, when cgraph asks dwarf2out to emit info
25675 for the concrete instance of the function decl into which
25676 the concrete instance of STMT got inlined, the latter will lead
25677 to the generation of a DW_TAG_inlined_subroutine DIE. */
25678 if (! BLOCK_ABSTRACT (stmt))
25679 gen_inlined_subroutine_die (stmt, context_die);
25680 }
25681 else
25682 gen_lexical_block_die (stmt, context_die);
25683 }
25684 else
25685 decls_for_scope (stmt, context_die);
25686 }
25687
25688 /* Process variable DECL (or variable with origin ORIGIN) within
25689 block STMT and add it to CONTEXT_DIE. */
25690 static void
25691 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25692 {
25693 dw_die_ref die;
25694 tree decl_or_origin = decl ? decl : origin;
25695
25696 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25697 die = lookup_decl_die (decl_or_origin);
25698 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25699 {
25700 if (TYPE_DECL_IS_STUB (decl_or_origin))
25701 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25702 else
25703 die = lookup_decl_die (decl_or_origin);
25704 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25705 if (! die && ! early_dwarf)
25706 return;
25707 }
25708 else
25709 die = NULL;
25710
25711 /* Avoid creating DIEs for local typedefs and concrete static variables that
25712 will only be pruned later. */
25713 if ((origin || decl_ultimate_origin (decl))
25714 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25715 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25716 {
25717 origin = decl_ultimate_origin (decl_or_origin);
25718 if (decl && VAR_P (decl) && die != NULL)
25719 {
25720 die = lookup_decl_die (origin);
25721 if (die != NULL)
25722 equate_decl_number_to_die (decl, die);
25723 }
25724 return;
25725 }
25726
25727 if (die != NULL && die->die_parent == NULL)
25728 add_child_die (context_die, die);
25729 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25730 {
25731 if (early_dwarf)
25732 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25733 stmt, context_die);
25734 }
25735 else
25736 {
25737 if (decl && DECL_P (decl))
25738 {
25739 die = lookup_decl_die (decl);
25740
25741 /* Early created DIEs do not have a parent as the decls refer
25742 to the function as DECL_CONTEXT rather than the BLOCK. */
25743 if (die && die->die_parent == NULL)
25744 {
25745 gcc_assert (in_lto_p);
25746 add_child_die (context_die, die);
25747 }
25748 }
25749
25750 gen_decl_die (decl, origin, NULL, context_die);
25751 }
25752 }
25753
25754 /* Generate all of the decls declared within a given scope and (recursively)
25755 all of its sub-blocks. */
25756
25757 static void
25758 decls_for_scope (tree stmt, dw_die_ref context_die)
25759 {
25760 tree decl;
25761 unsigned int i;
25762 tree subblocks;
25763
25764 /* Ignore NULL blocks. */
25765 if (stmt == NULL_TREE)
25766 return;
25767
25768 /* Output the DIEs to represent all of the data objects and typedefs
25769 declared directly within this block but not within any nested
25770 sub-blocks. Also, nested function and tag DIEs have been
25771 generated with a parent of NULL; fix that up now. We don't
25772 have to do this if we're at -g1. */
25773 if (debug_info_level > DINFO_LEVEL_TERSE)
25774 {
25775 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25776 process_scope_var (stmt, decl, NULL_TREE, context_die);
25777 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25778 origin - avoid doing this twice as we have no good way to see
25779 if we've done it once already. */
25780 if (! early_dwarf)
25781 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25782 {
25783 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25784 if (decl == current_function_decl)
25785 /* Ignore declarations of the current function: although they
25786 are declarations, gen_subprogram_die would treat them
25787 as definitions again, because they are equal to
25788 current_function_decl, and endlessly recurse. */;
25789 else if (TREE_CODE (decl) == FUNCTION_DECL)
25790 process_scope_var (stmt, decl, NULL_TREE, context_die);
25791 else
25792 process_scope_var (stmt, NULL_TREE, decl, context_die);
25793 }
25794 }
25795
25796 /* Even if we're at -g1, we need to process the subblocks in order to get
25797 inlined call information. */
25798
25799 /* Output the DIEs to represent all sub-blocks (and the items declared
25800 therein) of this block. */
25801 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25802 subblocks != NULL;
25803 subblocks = BLOCK_CHAIN (subblocks))
25804 gen_block_die (subblocks, context_die);
25805 }
25806
25807 /* Is this a typedef we can avoid emitting? */
25808
25809 bool
25810 is_redundant_typedef (const_tree decl)
25811 {
25812 if (TYPE_DECL_IS_STUB (decl))
25813 return true;
25814
25815 if (DECL_ARTIFICIAL (decl)
25816 && DECL_CONTEXT (decl)
25817 && is_tagged_type (DECL_CONTEXT (decl))
25818 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25819 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25820 /* Also ignore the artificial member typedef for the class name. */
25821 return true;
25822
25823 return false;
25824 }
25825
25826 /* Return TRUE if TYPE is a typedef that names a type for linkage
25827 purposes. This kind of typedefs is produced by the C++ FE for
25828 constructs like:
25829
25830 typedef struct {...} foo;
25831
25832 In that case, there is no typedef variant type produced for foo.
25833 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25834 struct type. */
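/* For illustration only (not from the original source), in C++ terms:

     typedef struct { int i; } foo;

   is a naming typedef: foo supplies the linkage name of the anonymous
   struct, DECL_ORIGINAL_TYPE of foo is NULL and TYPE_NAME of the struct
   is foo itself. By contrast, given

     struct bar { int i; };
     typedef bar baz;

   baz is an ordinary typedef whose DECL_ORIGINAL_TYPE is bar, so the
   check below rejects it. */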
25835
25836 static bool
25837 is_naming_typedef_decl (const_tree decl)
25838 {
25839 if (decl == NULL_TREE
25840 || TREE_CODE (decl) != TYPE_DECL
25841 || DECL_NAMELESS (decl)
25842 || !is_tagged_type (TREE_TYPE (decl))
25843 || DECL_IS_BUILTIN (decl)
25844 || is_redundant_typedef (decl)
25845 /* It looks like Ada produces TYPE_DECLs that are very similar
25846 to C++ naming typedefs but that have different
25847 semantics. Let's be specific to C++ for now. */
25848 || !is_cxx (decl))
25849 return FALSE;
25850
25851 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25852 && TYPE_NAME (TREE_TYPE (decl)) == decl
25853 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25854 != TYPE_NAME (TREE_TYPE (decl))));
25855 }
25856
25857 /* Looks up the DIE for a context. */
25858
25859 static inline dw_die_ref
25860 lookup_context_die (tree context)
25861 {
25862 if (context)
25863 {
25864 /* Find die that represents this context. */
25865 if (TYPE_P (context))
25866 {
25867 context = TYPE_MAIN_VARIANT (context);
25868 dw_die_ref ctx = lookup_type_die (context);
25869 if (!ctx)
25870 return NULL;
25871 return strip_naming_typedef (context, ctx);
25872 }
25873 else
25874 return lookup_decl_die (context);
25875 }
25876 return comp_unit_die ();
25877 }
25878
25879 /* Returns the DIE for a context. */
25880
25881 static inline dw_die_ref
25882 get_context_die (tree context)
25883 {
25884 if (context)
25885 {
25886 /* Find die that represents this context. */
25887 if (TYPE_P (context))
25888 {
25889 context = TYPE_MAIN_VARIANT (context);
25890 return strip_naming_typedef (context, force_type_die (context));
25891 }
25892 else
25893 return force_decl_die (context);
25894 }
25895 return comp_unit_die ();
25896 }
25897
25898 /* Returns the DIE for decl. A DIE will always be returned. */
25899
25900 static dw_die_ref
25901 force_decl_die (tree decl)
25902 {
25903 dw_die_ref decl_die;
25904 unsigned saved_external_flag;
25905 tree save_fn = NULL_TREE;
25906 decl_die = lookup_decl_die (decl);
25907 if (!decl_die)
25908 {
25909 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25910
25911 decl_die = lookup_decl_die (decl);
25912 if (decl_die)
25913 return decl_die;
25914
25915 switch (TREE_CODE (decl))
25916 {
25917 case FUNCTION_DECL:
25918 /* Clear current_function_decl, so that gen_subprogram_die thinks
25919 that this is a declaration. At this point, we just want to force
25920 declaration die. */
25921 save_fn = current_function_decl;
25922 current_function_decl = NULL_TREE;
25923 gen_subprogram_die (decl, context_die);
25924 current_function_decl = save_fn;
25925 break;
25926
25927 case VAR_DECL:
25928 /* Set external flag to force declaration die. Restore it after
25929 gen_decl_die() call. */
25930 saved_external_flag = DECL_EXTERNAL (decl);
25931 DECL_EXTERNAL (decl) = 1;
25932 gen_decl_die (decl, NULL, NULL, context_die);
25933 DECL_EXTERNAL (decl) = saved_external_flag;
25934 break;
25935
25936 case NAMESPACE_DECL:
25937 if (dwarf_version >= 3 || !dwarf_strict)
25938 dwarf2out_decl (decl);
25939 else
25940 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25941 decl_die = comp_unit_die ();
25942 break;
25943
25944 case TRANSLATION_UNIT_DECL:
25945 decl_die = comp_unit_die ();
25946 break;
25947
25948 default:
25949 gcc_unreachable ();
25950 }
25951
25952 /* We should be able to find the DIE now. */
25953 if (!decl_die)
25954 decl_die = lookup_decl_die (decl);
25955 gcc_assert (decl_die);
25956 }
25957
25958 return decl_die;
25959 }
25960
25961 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
25962 always returned. */
25963
25964 static dw_die_ref
25965 force_type_die (tree type)
25966 {
25967 dw_die_ref type_die;
25968
25969 type_die = lookup_type_die (type);
25970 if (!type_die)
25971 {
25972 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25973
25974 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25975 false, context_die);
25976 gcc_assert (type_die);
25977 }
25978 return type_die;
25979 }
25980
25981 /* Force out any required namespaces to be able to output DECL,
25982 and return the new context_die for it, if it's changed. */
25983
25984 static dw_die_ref
25985 setup_namespace_context (tree thing, dw_die_ref context_die)
25986 {
25987 tree context = (DECL_P (thing)
25988 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25989 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25990 /* Force out the namespace. */
25991 context_die = force_decl_die (context);
25992
25993 return context_die;
25994 }
25995
25996 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
25997 type) within its namespace, if appropriate.
25998
25999 For compatibility with older debuggers, namespace DIEs only contain
26000 declarations; all definitions are emitted at CU scope, with
26001 DW_AT_specification pointing to the declaration (like with class
26002 members). */
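/* Illustrative only, not from the original source: for

     namespace N { int v = 1; }

   this produces a declaration DIE for v as a child of the DW_TAG_namespace
   DIE for N, while the defining DIE for v is emitted at CU scope with a
   DW_AT_specification attribute referring back to that declaration. */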
26003
26004 static dw_die_ref
26005 declare_in_namespace (tree thing, dw_die_ref context_die)
26006 {
26007 dw_die_ref ns_context;
26008
26009 if (debug_info_level <= DINFO_LEVEL_TERSE)
26010 return context_die;
26011
26012 /* External declarations in the local scope only need to be emitted
26013 once, not once in the namespace and once in the scope.
26014
26015 This avoids declaring the `extern' below in the
26016 namespace DIE as well as in the innermost scope:
26017
26018 namespace S
26019 {
26020 int i=5;
26021 int foo()
26022 {
26023 int i=8;
26024 extern int i;
26025 return i;
26026 }
26027 }
26028 */
26029 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26030 return context_die;
26031
26032 /* If this decl is from an inlined function, then don't try to emit it in its
26033 namespace, as we will get confused. It would have already been emitted
26034 when the abstract instance of the inline function was emitted anyway. */
26035 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26036 return context_die;
26037
26038 ns_context = setup_namespace_context (thing, context_die);
26039
26040 if (ns_context != context_die)
26041 {
26042 if (is_fortran ())
26043 return ns_context;
26044 if (DECL_P (thing))
26045 gen_decl_die (thing, NULL, NULL, ns_context);
26046 else
26047 gen_type_die (thing, ns_context);
26048 }
26049 return context_die;
26050 }
26051
26052 /* Generate a DIE for a namespace or namespace alias. */
26053
26054 static void
26055 gen_namespace_die (tree decl, dw_die_ref context_die)
26056 {
26057 dw_die_ref namespace_die;
26058
26059 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26060 they are an alias of. */
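/* For example, illustrative and not from the original source:

     namespace A = N;

   produces a DW_TAG_imported_declaration named "A" whose DW_AT_import
   refers to the DIE of namespace N, as built in the else branch below. */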
26061 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26062 {
26063 /* Output a real namespace or module. */
26064 context_die = setup_namespace_context (decl, comp_unit_die ());
26065 namespace_die = new_die (is_fortran ()
26066 ? DW_TAG_module : DW_TAG_namespace,
26067 context_die, decl);
26068 /* For Fortran modules defined in a different CU, don't add src coords. */
26069 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26070 {
26071 const char *name = dwarf2_name (decl, 0);
26072 if (name)
26073 add_name_attribute (namespace_die, name);
26074 }
26075 else
26076 add_name_and_src_coords_attributes (namespace_die, decl);
26077 if (DECL_EXTERNAL (decl))
26078 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26079 equate_decl_number_to_die (decl, namespace_die);
26080 }
26081 else
26082 {
26083 /* Output a namespace alias. */
26084
26085 /* Force out the namespace we are an alias of, if necessary. */
26086 dw_die_ref origin_die
26087 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26088
26089 if (DECL_FILE_SCOPE_P (decl)
26090 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26091 context_die = setup_namespace_context (decl, comp_unit_die ());
26092 /* Now create the namespace alias DIE. */
26093 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26094 add_name_and_src_coords_attributes (namespace_die, decl);
26095 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26096 equate_decl_number_to_die (decl, namespace_die);
26097 }
26098 if ((dwarf_version >= 5 || !dwarf_strict)
26099 && lang_hooks.decls.decl_dwarf_attribute (decl,
26100 DW_AT_export_symbols) == 1)
26101 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26102
26103 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26104 if (want_pubnames ())
26105 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26106 }
26107
26108 /* Generate Dwarf debug information for a decl described by DECL.
26109 The return value is currently only meaningful for PARM_DECLs;
26110 for all other decls it returns NULL.
26111
26112 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26113 It can be NULL otherwise. */
26114
26115 static dw_die_ref
26116 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26117 dw_die_ref context_die)
26118 {
26119 tree decl_or_origin = decl ? decl : origin;
26120 tree class_origin = NULL, ultimate_origin;
26121
26122 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26123 return NULL;
26124
26125 switch (TREE_CODE (decl_or_origin))
26126 {
26127 case ERROR_MARK:
26128 break;
26129
26130 case CONST_DECL:
26131 if (!is_fortran () && !is_ada ())
26132 {
26133 /* The individual enumerators of an enum type get output when we output
26134 the Dwarf representation of the relevant enum type itself. */
26135 break;
26136 }
26137
26138 /* Emit its type. */
26139 gen_type_die (TREE_TYPE (decl), context_die);
26140
26141 /* And its containing namespace. */
26142 context_die = declare_in_namespace (decl, context_die);
26143
26144 gen_const_die (decl, context_die);
26145 break;
26146
26147 case FUNCTION_DECL:
26148 #if 0
26149 /* FIXME */
26150 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26151 on local redeclarations of global functions. That seems broken. */
26152 if (current_function_decl != decl)
26153 /* This is only a declaration. */;
26154 #endif
26155
26156 /* We should have abstract copies already and should not generate
26157 stray type DIEs in late LTO dumping. */
26158 if (! early_dwarf)
26159 ;
26160
26161 /* If we're emitting a clone, emit info for the abstract instance. */
26162 else if (origin || DECL_ORIGIN (decl) != decl)
26163 dwarf2out_abstract_function (origin
26164 ? DECL_ORIGIN (origin)
26165 : DECL_ABSTRACT_ORIGIN (decl));
26166
26167 /* If we're emitting a possibly inlined function emit it as
26168 abstract instance. */
26169 else if (cgraph_function_possibly_inlined_p (decl)
26170 && ! DECL_ABSTRACT_P (decl)
26171 && ! class_or_namespace_scope_p (context_die)
26172 /* dwarf2out_abstract_function won't emit a die if this is just
26173 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26174 that case, because that works only if we have a die. */
26175 && DECL_INITIAL (decl) != NULL_TREE)
26176 dwarf2out_abstract_function (decl);
26177
26178 /* Otherwise we're emitting the primary DIE for this decl. */
26179 else if (debug_info_level > DINFO_LEVEL_TERSE)
26180 {
26181 /* Before we describe the FUNCTION_DECL itself, make sure that we
26182 have its containing type. */
26183 if (!origin)
26184 origin = decl_class_context (decl);
26185 if (origin != NULL_TREE)
26186 gen_type_die (origin, context_die);
26187
26188 /* And its return type. */
26189 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26190
26191 /* And its virtual context. */
26192 if (DECL_VINDEX (decl) != NULL_TREE)
26193 gen_type_die (DECL_CONTEXT (decl), context_die);
26194
26195 /* Make sure we have a member DIE for decl. */
26196 if (origin != NULL_TREE)
26197 gen_type_die_for_member (origin, decl, context_die);
26198
26199 /* And its containing namespace. */
26200 context_die = declare_in_namespace (decl, context_die);
26201 }
26202
26203 /* Now output a DIE to represent the function itself. */
26204 if (decl)
26205 gen_subprogram_die (decl, context_die);
26206 break;
26207
26208 case TYPE_DECL:
26209 /* If we are in terse mode, don't generate any DIEs to represent any
26210 actual typedefs. */
26211 if (debug_info_level <= DINFO_LEVEL_TERSE)
26212 break;
26213
26214 /* In the special case of a TYPE_DECL node representing the declaration
26215 of some type tag, if the given TYPE_DECL is marked as having been
26216 instantiated from some other (original) TYPE_DECL node (e.g. one which
26217 was generated within the original definition of an inline function) we
26218 used to generate a special (abbreviated) DW_TAG_structure_type,
26219 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26220 should actually reference those DIEs, as variable DIEs with that
26221 type would already have been emitted in the abstract origin, so it was
26222 always removed during unused type pruning. Don't add anything in this
26223 case. */
26224 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26225 break;
26226
26227 if (is_redundant_typedef (decl))
26228 gen_type_die (TREE_TYPE (decl), context_die);
26229 else
26230 /* Output a DIE to represent the typedef itself. */
26231 gen_typedef_die (decl, context_die);
26232 break;
26233
26234 case LABEL_DECL:
26235 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26236 gen_label_die (decl, context_die);
26237 break;
26238
26239 case VAR_DECL:
26240 case RESULT_DECL:
26241 /* If we are in terse mode, don't generate any DIEs to represent any
26242 variable declarations or definitions. */
26243 if (debug_info_level <= DINFO_LEVEL_TERSE)
26244 break;
26245
26246 /* Avoid generating stray type DIEs during late dwarf dumping.
26247 All types have been dumped early. */
26248 if (early_dwarf
26249 /* ??? But in LTRANS we cannot annotate early created variably
26250 modified type DIEs without copying them and adjusting all
26251 references to them. Dump them again as happens for inlining
26252 which copies both the decl and the types. */
26253 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26254 in VLA bound information for example. */
26255 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26256 current_function_decl)))
26257 {
26258 /* Output any DIEs that are needed to specify the type of this data
26259 object. */
26260 if (decl_by_reference_p (decl_or_origin))
26261 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26262 else
26263 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26264 }
26265
26266 if (early_dwarf)
26267 {
26268 /* And its containing type. */
26269 class_origin = decl_class_context (decl_or_origin);
26270 if (class_origin != NULL_TREE)
26271 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26272
26273 /* And its containing namespace. */
26274 context_die = declare_in_namespace (decl_or_origin, context_die);
26275 }
26276
26277 /* Now output the DIE to represent the data object itself. This gets
26278 complicated because of the possibility that the VAR_DECL really
26279 represents an inlined instance of a formal parameter for an inline
26280 function. */
26281 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26282 if (ultimate_origin != NULL_TREE
26283 && TREE_CODE (ultimate_origin) == PARM_DECL)
26284 gen_formal_parameter_die (decl, origin,
26285 true /* Emit name attribute. */,
26286 context_die);
26287 else
26288 gen_variable_die (decl, origin, context_die);
26289 break;
26290
26291 case FIELD_DECL:
26292 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26293 /* Ignore the nameless fields that are used to skip bits but handle C++
26294 anonymous unions and structs. */
26295 if (DECL_NAME (decl) != NULL_TREE
26296 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26297 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26298 {
26299 gen_type_die (member_declared_type (decl), context_die);
26300 gen_field_die (decl, ctx, context_die);
26301 }
26302 break;
26303
26304 case PARM_DECL:
26305 /* Avoid generating stray type DIEs during late dwarf dumping.
26306 All types have been dumped early. */
26307 if (early_dwarf
26308 /* ??? But in LTRANS we cannot annotate early created variably
26309 modified type DIEs without copying them and adjusting all
26310 references to them. Dump them again as happens for inlining
26311 which copies both the decl and the types. */
26312 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26313 in VLA bound information for example. */
26314 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26315 current_function_decl)))
26316 {
26317 if (DECL_BY_REFERENCE (decl_or_origin))
26318 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26319 else
26320 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26321 }
26322 return gen_formal_parameter_die (decl, origin,
26323 true /* Emit name attribute. */,
26324 context_die);
26325
26326 case NAMESPACE_DECL:
26327 if (dwarf_version >= 3 || !dwarf_strict)
26328 gen_namespace_die (decl, context_die);
26329 break;
26330
26331 case IMPORTED_DECL:
26332 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26333 DECL_CONTEXT (decl), context_die);
26334 break;
26335
26336 case NAMELIST_DECL:
26337 gen_namelist_decl (DECL_NAME (decl), context_die,
26338 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26339 break;
26340
26341 default:
26342 /* Probably some frontend-internal decl. Assume we don't care. */
26343 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26344 break;
26345 }
26346
26347 return NULL;
26348 }
26349 \f
26350 /* Output initial debug information for global DECL. Called at the
26351 end of the parsing process.
26352
26353 This is the initial debug generation process. As such, the DIEs
26354 generated may be incomplete. A later debug generation pass
26355 (dwarf2out_late_global_decl) will augment the information generated
26356 in this pass (e.g., with complete location info). */
26357
26358 static void
26359 dwarf2out_early_global_decl (tree decl)
26360 {
26361 set_early_dwarf s;
26362
26363 /* gen_decl_die() will set DECL_ABSTRACT because
26364 cgraph_function_possibly_inlined_p() returns true. This in
26365 turn will cause DW_AT_inline attributes to be set.
26366
26367 This happens because at early dwarf generation, there is no
26368 cgraph information, causing cgraph_function_possibly_inlined_p()
26369 to return true. Trick cgraph_function_possibly_inlined_p()
26370 while we generate dwarf early. */
26371 bool save = symtab->global_info_ready;
26372 symtab->global_info_ready = true;
26373
26374 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26375 other DECLs and they can point to template types or other things
26376 that dwarf2out can't handle when done via dwarf2out_decl. */
26377 if (TREE_CODE (decl) != TYPE_DECL
26378 && TREE_CODE (decl) != PARM_DECL)
26379 {
26380 if (TREE_CODE (decl) == FUNCTION_DECL)
26381 {
26382 tree save_fndecl = current_function_decl;
26383
26384 /* For nested functions, make sure we have DIEs for the parents first
26385 so that all nested DIEs are generated at the proper scope in the
26386 first shot. */
26387 tree context = decl_function_context (decl);
26388 if (context != NULL)
26389 {
26390 dw_die_ref context_die = lookup_decl_die (context);
26391 current_function_decl = context;
26392
26393 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26394 enough so that it lands in its own context. This avoids type
26395 pruning issues later on. */
26396 if (context_die == NULL || is_declaration_die (context_die))
26397 dwarf2out_decl (context);
26398 }
26399
26400 /* Emit an abstract origin of a function first. This happens
26401 with C++ constructor clones for example and makes
26402 dwarf2out_abstract_function happy which requires the early
26403 DIE of the abstract instance to be present. */
26404 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26405 dw_die_ref origin_die;
26406 if (origin != NULL
26407 /* Do not emit the DIE multiple times but make sure to
26408 process it fully here in case we just saw a declaration. */
26409 && ((origin_die = lookup_decl_die (origin)) == NULL
26410 || is_declaration_die (origin_die)))
26411 {
26412 current_function_decl = origin;
26413 dwarf2out_decl (origin);
26414 }
26415
26416 /* Emit the DIE for decl but avoid doing that multiple times. */
26417 dw_die_ref old_die;
26418 if ((old_die = lookup_decl_die (decl)) == NULL
26419 || is_declaration_die (old_die))
26420 {
26421 current_function_decl = decl;
26422 dwarf2out_decl (decl);
26423 }
26424
26425 current_function_decl = save_fndecl;
26426 }
26427 else
26428 dwarf2out_decl (decl);
26429 }
26430 symtab->global_info_ready = save;
26431 }
26432
26433 /* Return whether EXPR is an expression with the following pattern:
26434 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
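/* Illustrative only, not from the original source: a DECL_VALUE_EXPR that
   dereferences an integer constant cast to a pointer, roughly

     *(int *) 0x1234

   at the source level, matches this pattern, whereas dereferencing a
   pointer variable does not, since the operand of the INDIRECT_REF is
   then not a NOP_EXPR of an INTEGER_CST. */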
26435
26436 static bool
26437 is_trivial_indirect_ref (tree expr)
26438 {
26439 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26440 return false;
26441
26442 tree nop = TREE_OPERAND (expr, 0);
26443 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26444 return false;
26445
26446 tree int_cst = TREE_OPERAND (nop, 0);
26447 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26448 }
26449
26450 /* Output debug information for global decl DECL. Called from
26451 toplev.c after compilation proper has finished. */
26452
26453 static void
26454 dwarf2out_late_global_decl (tree decl)
26455 {
26456 /* Fill in any location information we were unable to determine
26457 on the first pass. */
26458 if (VAR_P (decl))
26459 {
26460 dw_die_ref die = lookup_decl_die (decl);
26461
26462 /* We may have to generate early debug late for LTO in case debug
26463 was not enabled at compile-time or the target doesn't support
26464 the LTO early debug scheme. */
26465 if (! die && in_lto_p)
26466 {
26467 dwarf2out_decl (decl);
26468 die = lookup_decl_die (decl);
26469 }
26470
26471 if (die)
26472 {
26473 /* We get called via the symtab code invoking late_global_decl
26474 for symbols that are optimized out.
26475
26476 Do not add locations for those, except if they have a
26477 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26478 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26479 INDIRECT_REF expression, as this could generate relocations to
26480 text symbols in LTO object files, which is invalid. */
26481 varpool_node *node = varpool_node::get (decl);
26482 if ((! node || ! node->definition)
26483 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26484 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26485 tree_add_const_value_attribute_for_decl (die, decl);
26486 else
26487 add_location_or_const_value_attribute (die, decl, false);
26488 }
26489 }
26490 }
26491
26492 /* Output debug information for type decl DECL. Called from toplev.c
26493 and from language front ends (to record built-in types). */
26494 static void
26495 dwarf2out_type_decl (tree decl, int local)
26496 {
26497 if (!local)
26498 {
26499 set_early_dwarf s;
26500 dwarf2out_decl (decl);
26501 }
26502 }
26503
26504 /* Output debug information for imported module or decl DECL.
26505 NAME is non-NULL name in the lexical block if the decl has been renamed.
26506 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26507 that DECL belongs to.
26508 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26509 static void
26510 dwarf2out_imported_module_or_decl_1 (tree decl,
26511 tree name,
26512 tree lexical_block,
26513 dw_die_ref lexical_block_die)
26514 {
26515 expanded_location xloc;
26516 dw_die_ref imported_die = NULL;
26517 dw_die_ref at_import_die;
26518
26519 if (TREE_CODE (decl) == IMPORTED_DECL)
26520 {
26521 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26522 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26523 gcc_assert (decl);
26524 }
26525 else
26526 xloc = expand_location (input_location);
26527
26528 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26529 {
26530 at_import_die = force_type_die (TREE_TYPE (decl));
26531 /* For namespace N { typedef void T; } using N::T; base_type_die
26532 returns NULL, but DW_TAG_imported_declaration requires
26533 the DW_AT_import attribute. Force creation of DW_TAG_typedef. */
26534 if (!at_import_die)
26535 {
26536 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26537 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26538 at_import_die = lookup_type_die (TREE_TYPE (decl));
26539 gcc_assert (at_import_die);
26540 }
26541 }
26542 else
26543 {
26544 at_import_die = lookup_decl_die (decl);
26545 if (!at_import_die)
26546 {
26547 /* If we're trying to avoid duplicate debug info, we may not have
26548 emitted the member decl for this field. Emit it now. */
26549 if (TREE_CODE (decl) == FIELD_DECL)
26550 {
26551 tree type = DECL_CONTEXT (decl);
26552
26553 if (TYPE_CONTEXT (type)
26554 && TYPE_P (TYPE_CONTEXT (type))
26555 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26556 DINFO_USAGE_DIR_USE))
26557 return;
26558 gen_type_die_for_member (type, decl,
26559 get_context_die (TYPE_CONTEXT (type)));
26560 }
26561 if (TREE_CODE (decl) == NAMELIST_DECL)
26562 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26563 get_context_die (DECL_CONTEXT (decl)),
26564 NULL_TREE);
26565 else
26566 at_import_die = force_decl_die (decl);
26567 }
26568 }
26569
26570 if (TREE_CODE (decl) == NAMESPACE_DECL)
26571 {
26572 if (dwarf_version >= 3 || !dwarf_strict)
26573 imported_die = new_die (DW_TAG_imported_module,
26574 lexical_block_die,
26575 lexical_block);
26576 else
26577 return;
26578 }
26579 else
26580 imported_die = new_die (DW_TAG_imported_declaration,
26581 lexical_block_die,
26582 lexical_block);
26583
26584 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26585 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26586 if (debug_column_info && xloc.column)
26587 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26588 if (name)
26589 add_AT_string (imported_die, DW_AT_name,
26590 IDENTIFIER_POINTER (name));
26591 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26592 }
26593
26594 /* Output debug information for imported module or decl DECL.
26595 NAME is non-NULL name in context if the decl has been renamed.
26596 CHILD is true if decl is one of the renamed decls as part of
26597 importing whole module.
26598 IMPLICIT is set if this hook is called for an implicit import
26599 such as inline namespace. */
26600
26601 static void
26602 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26603 bool child, bool implicit)
26604 {
26605 /* dw_die_ref at_import_die; */
26606 dw_die_ref scope_die;
26607
26608 if (debug_info_level <= DINFO_LEVEL_TERSE)
26609 return;
26610
26611 gcc_assert (decl);
26612
26613 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26614 should be enough; for DWARF4 and older, even if we emit
26615 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26616 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
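/* Illustrative C++ sketch, names hypothetical and not from the original
   source:

     namespace N { inline namespace V1 { int x; } }

   The inline namespace V1 implicitly exports its members into N; with
   DWARF5 the DW_AT_export_symbols flag on V1's DIE is enough, so the
   implicit DW_TAG_imported_module is skipped here. */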
26617 if (implicit
26618 && dwarf_version >= 5
26619 && lang_hooks.decls.decl_dwarf_attribute (decl,
26620 DW_AT_export_symbols) == 1)
26621 return;
26622
26623 set_early_dwarf s;
26624
26625 /* To emit DW_TAG_imported_module or DW_TAG_imported_decl, we need two DIEs.
26626 We need the decl DIE for the reference and a scope DIE. First, get the
26627 DIE for the decl itself. */
26628
26629 /* Get the scope DIE for the decl's context. Use comp_unit_die for a global
26630 module or decl. If a DIE is not found for non-globals, force a new DIE. */
26631 if (context
26632 && TYPE_P (context)
26633 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26634 return;
26635
26636 scope_die = get_context_die (context);
26637
26638 if (child)
26639 {
26640 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26641 there is nothing we can do here. */
26642 if (dwarf_version < 3 && dwarf_strict)
26643 return;
26644
26645 gcc_assert (scope_die->die_child);
26646 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26647 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26648 scope_die = scope_die->die_child;
26649 }
26650
26651 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26652 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26653 }
26654
26655 /* Output debug information for namelists. */
26656
26657 static dw_die_ref
26658 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26659 {
26660 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26661 tree value;
26662 unsigned i;
26663
26664 if (debug_info_level <= DINFO_LEVEL_TERSE)
26665 return NULL;
26666
26667 gcc_assert (scope_die != NULL);
26668 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26669 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26670
26671 /* If there are no item_decls, we have a nondefining namelist, e.g.
26672 with USE association; hence, set DW_AT_declaration. */
26673 if (item_decls == NULL_TREE)
26674 {
26675 add_AT_flag (nml_die, DW_AT_declaration, 1);
26676 return nml_die;
26677 }
26678
26679 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26680 {
26681 nml_item_ref_die = lookup_decl_die (value);
26682 if (!nml_item_ref_die)
26683 nml_item_ref_die = force_decl_die (value);
26684
26685 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26686 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26687 }
26688 return nml_die;
26689 }
26690
26691
26692 /* Write the debugging output for DECL. */
26693
26694 static void
26695 dwarf2out_decl (tree decl)
26696 {
26697 dw_die_ref context_die = comp_unit_die ();
26698
26699 switch (TREE_CODE (decl))
26700 {
26701 case ERROR_MARK:
26702 return;
26703
26704 case FUNCTION_DECL:
26705 /* If we're a nested function, initially use a parent of NULL; if we're
26706 a plain function, this will be fixed up in decls_for_scope. If
26707 we're a method, it will be ignored, since we already have a DIE. */
26708 if (decl_function_context (decl)
26709 /* But if we're in terse mode, we don't care about scope. */
26710 && debug_info_level > DINFO_LEVEL_TERSE)
26711 context_die = NULL;
26712 break;
26713
26714 case VAR_DECL:
26715 /* For local statics, look up the proper context DIE. */
26716 if (local_function_static (decl))
26717 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26718
26719 /* If we are in terse mode, don't generate any DIEs to represent any
26720 variable declarations or definitions. */
26721 if (debug_info_level <= DINFO_LEVEL_TERSE)
26722 return;
26723 break;
26724
26725 case CONST_DECL:
26726 if (debug_info_level <= DINFO_LEVEL_TERSE)
26727 return;
26728 if (!is_fortran () && !is_ada ())
26729 return;
26730 if (TREE_STATIC (decl) && decl_function_context (decl))
26731 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26732 break;
26733
26734 case NAMESPACE_DECL:
26735 case IMPORTED_DECL:
26736 if (debug_info_level <= DINFO_LEVEL_TERSE)
26737 return;
26738 if (lookup_decl_die (decl) != NULL)
26739 return;
26740 break;
26741
26742 case TYPE_DECL:
26743 /* Don't emit stubs for types unless they are needed by other DIEs. */
26744 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26745 return;
26746
26747 /* Don't bother trying to generate any DIEs to represent any of the
26748 normal built-in types for the language we are compiling. */
26749 if (DECL_IS_BUILTIN (decl))
26750 return;
26751
26752 /* If we are in terse mode, don't generate any DIEs for types. */
26753 if (debug_info_level <= DINFO_LEVEL_TERSE)
26754 return;
26755
26756 /* If we're a function-scope tag, initially use a parent of NULL;
26757 this will be fixed up in decls_for_scope. */
26758 if (decl_function_context (decl))
26759 context_die = NULL;
26760
26761 break;
26762
26763 case NAMELIST_DECL:
26764 break;
26765
26766 default:
26767 return;
26768 }
26769
26770 gen_decl_die (decl, NULL, NULL, context_die);
26771
26772 if (flag_checking)
26773 {
26774 dw_die_ref die = lookup_decl_die (decl);
26775 if (die)
26776 check_die (die);
26777 }
26778 }
26779
26780 /* Write the debugging output for DECL. */
26781
26782 static void
26783 dwarf2out_function_decl (tree decl)
26784 {
26785 dwarf2out_decl (decl);
26786 call_arg_locations = NULL;
26787 call_arg_loc_last = NULL;
26788 call_site_count = -1;
26789 tail_call_site_count = -1;
26790 decl_loc_table->empty ();
26791 cached_dw_loc_list_table->empty ();
26792 }
26793
26794 /* Output a marker (i.e. a label) for the beginning of the generated code for
26795 a lexical block. */
26796
26797 static void
26798 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26799 unsigned int blocknum)
26800 {
26801 switch_to_section (current_function_section ());
26802 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26803 }
26804
26805 /* Output a marker (i.e. a label) for the end of the generated code for a
26806 lexical block. */
26807
26808 static void
26809 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26810 {
26811 switch_to_section (current_function_section ());
26812 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26813 }
26814
26815 /* Returns true if it is appropriate not to emit any debugging
26816 information for BLOCK, because it doesn't contain any instructions.
26817
26818 Don't allow this for blocks with nested functions or local classes
26819 as we would end up with orphans, and in the presence of scheduling
26820 we may end up calling them anyway. */
26821
26822 static bool
26823 dwarf2out_ignore_block (const_tree block)
26824 {
26825 tree decl;
26826 unsigned int i;
26827
26828 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26829 if (TREE_CODE (decl) == FUNCTION_DECL
26830 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26831 return false;
26832 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26833 {
26834 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26835 if (TREE_CODE (decl) == FUNCTION_DECL
26836 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26837 return false;
26838 }
26839
26840 return true;
26841 }
26842
26843 /* Hash table routines for file_hash. */
26844
26845 bool
26846 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26847 {
26848 return filename_cmp (p1->filename, p2) == 0;
26849 }
26850
26851 hashval_t
26852 dwarf_file_hasher::hash (dwarf_file_data *p)
26853 {
26854 return htab_hash_string (p->filename);
26855 }
26856
26857 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26858 dwarf2out.c) and return its "index". The index of each (known) filename is
26859 just a unique number which is associated with only that one filename. We
26860 need such numbers for the sake of generating labels (in the .debug_sfnames
26861 section) and references to those files numbers (in the .debug_srcinfo
26862 and .debug_macinfo sections). If the filename given as an argument is not
26863 found in our current list, add it to the list and assign it the next
26864 available unique index number. */
26865
26866 static struct dwarf_file_data *
26867 lookup_filename (const char *file_name)
26868 {
26869 struct dwarf_file_data * created;
26870
26871 if (!file_name)
26872 return NULL;
26873
26874 dwarf_file_data **slot
26875 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26876 INSERT);
26877 if (*slot)
26878 return *slot;
26879
26880 created = ggc_alloc<dwarf_file_data> ();
26881 created->filename = file_name;
26882 created->emitted_number = 0;
26883 *slot = created;
26884 return created;
26885 }
26886
26887 /* If the assembler will construct the file table, then translate the compiler
26888 internal file table number into the assembler file table number, and emit
26889 a .file directive if we haven't already emitted one yet. The file table
26890 numbers are different because we prune debug info for unused variables and
26891 types, which may include filenames. */
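/* Illustrative output only, with a hypothetical file name: the first file
   emitted through this function would produce something like

     .file 1 "foo.c"

   and that number is what later .loc directives refer to when the
   assembler builds the line table. */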
26892
26893 static int
26894 maybe_emit_file (struct dwarf_file_data * fd)
26895 {
26896 if (! fd->emitted_number)
26897 {
26898 if (last_emitted_file)
26899 fd->emitted_number = last_emitted_file->emitted_number + 1;
26900 else
26901 fd->emitted_number = 1;
26902 last_emitted_file = fd;
26903
26904 if (output_asm_line_debug_info ())
26905 {
26906 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26907 output_quoted_string (asm_out_file,
26908 remap_debug_filename (fd->filename));
26909 fputc ('\n', asm_out_file);
26910 }
26911 }
26912
26913 return fd->emitted_number;
26914 }
26915
26916 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26917 That generation should happen after function debug info has been
26918 generated. The value of the attribute is the constant value of ARG. */
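/* Illustrative C++ sketch, not from the original source:

     template <int N> struct A {};
     A<3> a;

   The DIE/argument pair for the template value parameter N of A<3> is
   recorded here so that a DW_AT_const_value of 3 can be attached once
   function debug info has been generated. */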
26919
26920 static void
26921 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26922 {
26923 die_arg_entry entry;
26924
26925 if (!die || !arg)
26926 return;
26927
26928 gcc_assert (early_dwarf);
26929
26930 if (!tmpl_value_parm_die_table)
26931 vec_alloc (tmpl_value_parm_die_table, 32);
26932
26933 entry.die = die;
26934 entry.arg = arg;
26935 vec_safe_push (tmpl_value_parm_die_table, entry);
26936 }
26937
26938 /* Return TRUE if T is an instance of a generic type, FALSE
26939 otherwise. */
26940
26941 static bool
26942 generic_type_p (tree t)
26943 {
26944 if (t == NULL_TREE || !TYPE_P (t))
26945 return false;
26946 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26947 }
26948
26949 /* Schedule the generation of the generic parameter dies for the
26950 instance of generic type T. The proper generation itself is later
26951 done by gen_scheduled_generic_parms_dies. */
26952
26953 static void
26954 schedule_generic_params_dies_gen (tree t)
26955 {
26956 if (!generic_type_p (t))
26957 return;
26958
26959 gcc_assert (early_dwarf);
26960
26961 if (!generic_type_instances)
26962 vec_alloc (generic_type_instances, 256);
26963
26964 vec_safe_push (generic_type_instances, t);
26965 }
26966
26967 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26968 by append_entry_to_tmpl_value_parm_die_table. This function must
26969 be called after function DIEs have been generated. */
26970
26971 static void
26972 gen_remaining_tmpl_value_param_die_attribute (void)
26973 {
26974 if (tmpl_value_parm_die_table)
26975 {
26976 unsigned i, j;
26977 die_arg_entry *e;
26978
26979 /* We do this in two phases - first get the cases we can
26980 handle during early-finish, preserving those we cannot
26981 (containing symbolic constants where we don't yet know
26982 whether we are going to output the referenced symbols).
26983 For those we try again at late-finish. */
26984 j = 0;
26985 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
26986 {
26987 if (!e->die->removed
26988 && !tree_add_const_value_attribute (e->die, e->arg))
26989 {
26990 dw_loc_descr_ref loc = NULL;
26991 if (! early_dwarf
26992 && (dwarf_version >= 5 || !dwarf_strict))
26993 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
26994 if (loc)
26995 add_AT_loc (e->die, DW_AT_location, loc);
26996 else
26997 (*tmpl_value_parm_die_table)[j++] = *e;
26998 }
26999 }
27000 tmpl_value_parm_die_table->truncate (j);
27001 }
27002 }
27003
27004 /* Generate generic parameters DIEs for instances of generic types
27005 that have been previously scheduled by
27006 schedule_generic_params_dies_gen. This function must be called
27007 after all the types of the CU have been laid out. */
27008
27009 static void
27010 gen_scheduled_generic_parms_dies (void)
27011 {
27012 unsigned i;
27013 tree t;
27014
27015 if (!generic_type_instances)
27016 return;
27017
27018 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27019 if (COMPLETE_TYPE_P (t))
27020 gen_generic_params_dies (t);
27021
27022 generic_type_instances = NULL;
27023 }
27024
27025
27026 /* Replace DW_AT_name for the decl with name. */
27027
27028 static void
27029 dwarf2out_set_name (tree decl, tree name)
27030 {
27031 dw_die_ref die;
27032 dw_attr_node *attr;
27033 const char *dname;
27034
27035 die = TYPE_SYMTAB_DIE (decl);
27036 if (!die)
27037 return;
27038
27039 dname = dwarf2_name (name, 0);
27040 if (!dname)
27041 return;
27042
27043 attr = get_AT (die, DW_AT_name);
27044 if (attr)
27045 {
27046 struct indirect_string_node *node;
27047
27048 node = find_AT_string (dname);
27049 /* Replace the string. */
27050 attr->dw_attr_val.v.val_str = node;
27051 }
27052
27053 else
27054 add_name_attribute (die, dname);
27055 }
27056
27057 /* True if before or during processing of the first function being emitted. */
27058 static bool in_first_function_p = true;
27059 /* True if loc_note during dwarf2out_var_location call might still be
27060 before first real instruction at address equal to .Ltext0. */
27061 static bool maybe_at_text_label_p = true;
27062 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27063 static unsigned int first_loclabel_num_not_at_text_label;
27064
27065 /* Look ahead for a real insn, or for a begin stmt marker. */
27066
27067 static rtx_insn *
27068 dwarf2out_next_real_insn (rtx_insn *loc_note)
27069 {
27070 rtx_insn *next_real = NEXT_INSN (loc_note);
27071
27072 while (next_real)
27073 if (INSN_P (next_real))
27074 break;
27075 else
27076 next_real = NEXT_INSN (next_real);
27077
27078 return next_real;
27079 }
27080
27081 /* Called by the final INSN scan whenever we see a var location. We
27082 use it to drop labels in the right places, and throw the location in
27083 our lookup table. */
27084
27085 static void
27086 dwarf2out_var_location (rtx_insn *loc_note)
27087 {
27088 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27089 struct var_loc_node *newloc;
27090 rtx_insn *next_real, *next_note;
27091 rtx_insn *call_insn = NULL;
27092 static const char *last_label;
27093 static const char *last_postcall_label;
27094 static bool last_in_cold_section_p;
27095 static rtx_insn *expected_next_loc_note;
27096 tree decl;
27097 bool var_loc_p;
27098 var_loc_view view = 0;
27099
27100 if (!NOTE_P (loc_note))
27101 {
27102 if (CALL_P (loc_note))
27103 {
27104 maybe_reset_location_view (loc_note, cur_line_info_table);
27105 call_site_count++;
27106 if (SIBLING_CALL_P (loc_note))
27107 tail_call_site_count++;
27108 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27109 {
27110 call_insn = loc_note;
27111 loc_note = NULL;
27112 var_loc_p = false;
27113
27114 next_real = dwarf2out_next_real_insn (call_insn);
27115 next_note = NULL;
27116 cached_next_real_insn = NULL;
27117 goto create_label;
27118 }
27119 if (optimize == 0 && !flag_var_tracking)
27120 {
27121 /* When the var-tracking pass is not running, there is no note
27122 for indirect calls whose target is compile-time known. In this
27123 case, process such calls specifically so that we generate call
27124 sites for them anyway. */
27125 rtx x = PATTERN (loc_note);
27126 if (GET_CODE (x) == PARALLEL)
27127 x = XVECEXP (x, 0, 0);
27128 if (GET_CODE (x) == SET)
27129 x = SET_SRC (x);
27130 if (GET_CODE (x) == CALL)
27131 x = XEXP (x, 0);
27132 if (!MEM_P (x)
27133 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27134 || !SYMBOL_REF_DECL (XEXP (x, 0))
27135 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27136 != FUNCTION_DECL))
27137 {
27138 call_insn = loc_note;
27139 loc_note = NULL;
27140 var_loc_p = false;
27141
27142 next_real = dwarf2out_next_real_insn (call_insn);
27143 next_note = NULL;
27144 cached_next_real_insn = NULL;
27145 goto create_label;
27146 }
27147 }
27148 }
27149 else if (!debug_variable_location_views)
27150 gcc_unreachable ();
27151 else
27152 maybe_reset_location_view (loc_note, cur_line_info_table);
27153
27154 return;
27155 }
27156
27157 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27158 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27159 return;
27160
27161 /* Optimize processing a large consecutive sequence of location
27162 notes so we don't spend too much time in next_real_insn. If the
27163 next insn is another location note, remember the next_real_insn
27164 calculation for next time. */
27165 next_real = cached_next_real_insn;
27166 if (next_real)
27167 {
27168 if (expected_next_loc_note != loc_note)
27169 next_real = NULL;
27170 }
27171
27172 next_note = NEXT_INSN (loc_note);
27173 if (! next_note
27174 || next_note->deleted ()
27175 || ! NOTE_P (next_note)
27176 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27177 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27178 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27179 next_note = NULL;
27180
27181 if (! next_real)
27182 next_real = dwarf2out_next_real_insn (loc_note);
27183
27184 if (next_note)
27185 {
27186 expected_next_loc_note = next_note;
27187 cached_next_real_insn = next_real;
27188 }
27189 else
27190 cached_next_real_insn = NULL;
27191
27192 /* If there are no instructions which would be affected by this note,
27193 don't do anything. */
27194 if (var_loc_p
27195 && next_real == NULL_RTX
27196 && !NOTE_DURING_CALL_P (loc_note))
27197 return;
27198
27199 create_label:
27200
27201 if (next_real == NULL_RTX)
27202 next_real = get_last_insn ();
27203
27204 /* If there were any real insns between the note we processed last time
27205 and this note (or if it is the first note), clear
27206 last_{,postcall_}label so that they are not reused this time. */
27207 if (last_var_location_insn == NULL_RTX
27208 || last_var_location_insn != next_real
27209 || last_in_cold_section_p != in_cold_section_p)
27210 {
27211 last_label = NULL;
27212 last_postcall_label = NULL;
27213 }
27214
27215 if (var_loc_p)
27216 {
27217 const char *label
27218 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27219 view = cur_line_info_table->view;
27220 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27221 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27222 if (newloc == NULL)
27223 return;
27224 }
27225 else
27226 {
27227 decl = NULL_TREE;
27228 newloc = NULL;
27229 }
27230
27231 /* If there were no real insns between the note we processed last time
27232 and this note, use the label we emitted last time. Otherwise
27233 create a new label and emit it. */
27234 if (last_label == NULL)
27235 {
27236 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27237 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27238 loclabel_num++;
27239 last_label = ggc_strdup (loclabel);
27240 /* See if loclabel might be equal to .Ltext0. If yes,
27241 bump first_loclabel_num_not_at_text_label. */
27242 if (!have_multiple_function_sections
27243 && in_first_function_p
27244 && maybe_at_text_label_p)
27245 {
27246 static rtx_insn *last_start;
27247 rtx_insn *insn;
27248 for (insn = loc_note; insn; insn = previous_insn (insn))
27249 if (insn == last_start)
27250 break;
27251 else if (!NONDEBUG_INSN_P (insn))
27252 continue;
27253 else
27254 {
27255 rtx body = PATTERN (insn);
27256 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27257 continue;
27258 /* Inline asm could occupy zero bytes. */
27259 else if (GET_CODE (body) == ASM_INPUT
27260 || asm_noperands (body) >= 0)
27261 continue;
27262 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27263 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27264 continue;
27265 #endif
27266 else
27267 {
27268 /* Assume insn has non-zero length. */
27269 maybe_at_text_label_p = false;
27270 break;
27271 }
27272 }
27273 if (maybe_at_text_label_p)
27274 {
27275 last_start = loc_note;
27276 first_loclabel_num_not_at_text_label = loclabel_num;
27277 }
27278 }
27279 }
27280
27281 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27282 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27283
27284 if (!var_loc_p)
27285 {
27286 struct call_arg_loc_node *ca_loc
27287 = ggc_cleared_alloc<call_arg_loc_node> ();
27288 rtx_insn *prev = call_insn;
27289
27290 ca_loc->call_arg_loc_note
27291 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27292 ca_loc->next = NULL;
27293 ca_loc->label = last_label;
27294 gcc_assert (prev
27295 && (CALL_P (prev)
27296 || (NONJUMP_INSN_P (prev)
27297 && GET_CODE (PATTERN (prev)) == SEQUENCE
27298 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27299 if (!CALL_P (prev))
27300 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27301 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27302
27303 /* Look for a SYMBOL_REF in the "prev" instruction. */
27304 rtx x = get_call_rtx_from (PATTERN (prev));
27305 if (x)
27306 {
27307 /* Try to get the call symbol, if any. */
27308 if (MEM_P (XEXP (x, 0)))
27309 x = XEXP (x, 0);
27310 /* First, look for a memory access to a symbol_ref. */
27311 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27312 && SYMBOL_REF_DECL (XEXP (x, 0))
27313 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27314 ca_loc->symbol_ref = XEXP (x, 0);
27315 /* Otherwise, look at a compile-time known user-level function
27316 declaration. */
27317 else if (MEM_P (x)
27318 && MEM_EXPR (x)
27319 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27320 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27321 }
27322
27323 ca_loc->block = insn_scope (prev);
27324 if (call_arg_locations)
27325 call_arg_loc_last->next = ca_loc;
27326 else
27327 call_arg_locations = ca_loc;
27328 call_arg_loc_last = ca_loc;
27329 }
27330 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27331 {
27332 newloc->label = last_label;
27333 newloc->view = view;
27334 }
27335 else
27336 {
27337 if (!last_postcall_label)
27338 {
27339 sprintf (loclabel, "%s-1", last_label);
27340 last_postcall_label = ggc_strdup (loclabel);
27341 }
27342 newloc->label = last_postcall_label;
27343 /* ??? This view is at last_label, not last_label-1, but we
27344 could only assume view at last_label-1 is zero if we could
27345 assume calls always have length greater than one. This is
27346 probably true in general, though there might be a rare
27347 exception to this rule, e.g. if a call insn is optimized out
27348 by target magic. Then, even the -1 in the label will be
27349 wrong, which might invalidate the range. Anyway, using view,
27350 though technically possibly incorrect, will work as far as
27351 ranges go: since L-1 is in the middle of the call insn,
27352 (L-1).0 and (L-1).V shouldn't make any difference, and having
27353 the loclist entry refer to the .loc entry might be useful, so
27354 leave it like this. */
27355 newloc->view = view;
27356 }
27357
27358 if (var_loc_p && flag_debug_asm)
27359 {
27360 const char *name, *sep, *patstr;
27361 if (decl && DECL_NAME (decl))
27362 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27363 else
27364 name = "";
27365 if (NOTE_VAR_LOCATION_LOC (loc_note))
27366 {
27367 sep = " => ";
27368 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27369 }
27370 else
27371 {
27372 sep = " ";
27373 patstr = "RESET";
27374 }
27375 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27376 name, sep, patstr);
27377 }
27378
27379 last_var_location_insn = next_real;
27380 last_in_cold_section_p = in_cold_section_p;
27381 }
27382
27383 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27384 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27385 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27386 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27387 BLOCK_FRAGMENT_ORIGIN links. */
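/* Illustrative example (hypothetical blocks): if B2's BLOCK_SUPERCONTEXT
   is B1 and B1's is DECL_INITIAL (fndecl), then
   block_within_block_p (B2, DECL_INITIAL (fndecl), true) holds only if B1
   is also listed among the outermost block's BLOCK_SUBBLOCKS and B2 among
   B1's, i.e. the containment is visible in both directions.  */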
27388 static bool
27389 block_within_block_p (tree block, tree outer, bool bothways)
27390 {
27391 if (block == outer)
27392 return true;
27393
27394 /* Quickly check that OUTER is somewhere up BLOCK's supercontext chain. */
27395 for (tree context = BLOCK_SUPERCONTEXT (block);
27396 context != outer;
27397 context = BLOCK_SUPERCONTEXT (context))
27398 if (!context || TREE_CODE (context) != BLOCK)
27399 return false;
27400
27401 if (!bothways)
27402 return true;
27403
27404 /* Now check that each block is actually referenced by its
27405 parent. */
27406 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27407 context = BLOCK_SUPERCONTEXT (context))
27408 {
27409 if (BLOCK_FRAGMENT_ORIGIN (context))
27410 {
27411 gcc_assert (!BLOCK_SUBBLOCKS (context));
27412 context = BLOCK_FRAGMENT_ORIGIN (context);
27413 }
27414 for (tree sub = BLOCK_SUBBLOCKS (context);
27415 sub != block;
27416 sub = BLOCK_CHAIN (sub))
27417 if (!sub)
27418 return false;
27419 if (context == outer)
27420 return true;
27421 else
27422 block = context;
27423 }
27424 }
27425
27426 /* Called during final while assembling the marker of the entry point
27427 for an inlined function. */
27428
27429 static void
27430 dwarf2out_inline_entry (tree block)
27431 {
27432 gcc_assert (debug_inline_points);
27433
27434 /* If we can't represent it, don't bother. */
27435 if (!(dwarf_version >= 3 || !dwarf_strict))
27436 return;
27437
27438 gcc_assert (DECL_P (block_ultimate_origin (block)));
27439
27440 /* Sanity check the block tree. This would catch a case in which
27441 BLOCK got removed from the tree reachable from the outermost
27442 lexical block, but got retained in markers. It would still link
27443 back to its parents, but some ancestor would be missing a link
27444 down the path to the sub BLOCK. If the block got removed, its
27445 BLOCK_NUMBER will not be a usable value. */
27446 if (flag_checking)
27447 gcc_assert (block_within_block_p (block,
27448 DECL_INITIAL (current_function_decl),
27449 true));
27450
27451 gcc_assert (inlined_function_outer_scope_p (block));
27452 gcc_assert (!BLOCK_DIE (block));
27453
27454 if (BLOCK_FRAGMENT_ORIGIN (block))
27455 block = BLOCK_FRAGMENT_ORIGIN (block);
27456 /* Can the entry point ever not be at the beginning of an
27457 unfragmented lexical block? */
27458 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27459 || (cur_line_info_table
27460 && !ZERO_VIEW_P (cur_line_info_table->view))))
27461 return;
27462
27463 if (!inline_entry_data_table)
27464 inline_entry_data_table
27465 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27466
27467
27468 inline_entry_data **iedp
27469 = inline_entry_data_table->find_slot_with_hash (block,
27470 htab_hash_pointer (block),
27471 INSERT);
27472 if (*iedp)
27473 /* ??? Ideally, we'd record all entry points for the same inlined
27474 function (some may have been duplicated by e.g. unrolling), but
27475 we have no way to represent that ATM. */
27476 return;
27477
27478 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27479 ied->block = block;
27480 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27481 ied->label_num = BLOCK_NUMBER (block);
27482 if (cur_line_info_table)
27483 ied->view = cur_line_info_table->view;
27484
27485 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27486
27487 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27488 BLOCK_NUMBER (block));
27489 ASM_OUTPUT_LABEL (asm_out_file, label);
27490 }
27491
27492 /* Called from finalize_size_functions for size functions so that their body
27493 can be encoded in the debug info to describe the layout of variable-length
27494 structures. */
27495
27496 static void
27497 dwarf2out_size_function (tree decl)
27498 {
27499 function_to_dwarf_procedure (decl);
27500 }
27501
27502 /* Note in one location list that text section has changed. */
27503
27504 int
27505 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27506 {
27507 var_loc_list *list = *slot;
27508 if (list->first)
27509 list->last_before_switch
27510 = list->last->next ? list->last->next : list->last;
27511 return 1;
27512 }
27513
27514 /* Note in all location lists that text section has changed. */
27515
27516 static void
27517 var_location_switch_text_section (void)
27518 {
27519 if (decl_loc_table == NULL)
27520 return;
27521
27522 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27523 }
27524
27525 /* Create a new line number table. */
27526
27527 static dw_line_info_table *
27528 new_line_info_table (void)
27529 {
27530 dw_line_info_table *table;
27531
27532 table = ggc_cleared_alloc<dw_line_info_table> ();
27533 table->file_num = 1;
27534 table->line_num = 1;
27535 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27536 FORCE_RESET_NEXT_VIEW (table->view);
27537 table->symviews_since_reset = 0;
27538
27539 return table;
27540 }
27541
27542 /* Look up the "current" table into which we emit line info, so
27543 that we don't have to do it for every source line. */
27544
27545 static void
27546 set_cur_line_info_table (section *sec)
27547 {
27548 dw_line_info_table *table;
27549
27550 if (sec == text_section)
27551 table = text_section_line_info;
27552 else if (sec == cold_text_section)
27553 {
27554 table = cold_text_section_line_info;
27555 if (!table)
27556 {
27557 cold_text_section_line_info = table = new_line_info_table ();
27558 table->end_label = cold_end_label;
27559 }
27560 }
27561 else
27562 {
27563 const char *end_label;
27564
27565 if (crtl->has_bb_partition)
27566 {
27567 if (in_cold_section_p)
27568 end_label = crtl->subsections.cold_section_end_label;
27569 else
27570 end_label = crtl->subsections.hot_section_end_label;
27571 }
27572 else
27573 {
27574 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27575 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27576 current_function_funcdef_no);
27577 end_label = ggc_strdup (label);
27578 }
27579
27580 table = new_line_info_table ();
27581 table->end_label = end_label;
27582
27583 vec_safe_push (separate_line_info, table);
27584 }
27585
27586 if (output_asm_line_debug_info ())
27587 table->is_stmt = (cur_line_info_table
27588 ? cur_line_info_table->is_stmt
27589 : DWARF_LINE_DEFAULT_IS_STMT_START);
27590 cur_line_info_table = table;
27591 }
27592
27593
27594 /* We need to reset the locations at the beginning of each
27595 function. We can't do this in the end_function hook, because the
27596 declarations that use the locations won't have been output when
27597 that hook is called. Also compute have_multiple_function_sections here. */
27598
27599 static void
27600 dwarf2out_begin_function (tree fun)
27601 {
27602 section *sec = function_section (fun);
27603
27604 if (sec != text_section)
27605 have_multiple_function_sections = true;
27606
27607 if (crtl->has_bb_partition && !cold_text_section)
27608 {
27609 gcc_assert (current_function_decl == fun);
27610 cold_text_section = unlikely_text_section ();
27611 switch_to_section (cold_text_section);
27612 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27613 switch_to_section (sec);
27614 }
27615
27616 dwarf2out_note_section_used ();
27617 call_site_count = 0;
27618 tail_call_site_count = 0;
27619
27620 set_cur_line_info_table (sec);
27621 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27622 }
27623
27624 /* Helper function of dwarf2out_end_function, called only after emitting
27625 the very first function into assembly. Check if some .debug_loc range
27626 might end with a .LVL* label that could be equal to .Ltext0.
27627 In that case we must force using absolute addresses in .debug_loc ranges,
27628 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27629 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27630 list terminator.
27631 Set have_multiple_function_sections to true in that case and
27632 terminate htab traversal. */
27633
27634 int
27635 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27636 {
27637 var_loc_list *entry = *slot;
27638 struct var_loc_node *node;
27639
27640 node = entry->first;
27641 if (node && node->next && node->next->label)
27642 {
27643 unsigned int i;
27644 const char *label = node->next->label;
27645 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27646
27647 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27648 {
27649 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27650 if (strcmp (label, loclabel) == 0)
27651 {
27652 have_multiple_function_sections = true;
27653 return 0;
27654 }
27655 }
27656 }
27657 return 1;
27658 }
27659
27660 /* Hook called after emitting a function into assembly.
27661 This does something only for the very first function emitted. */
27662
27663 static void
27664 dwarf2out_end_function (unsigned int)
27665 {
27666 if (in_first_function_p
27667 && !have_multiple_function_sections
27668 && first_loclabel_num_not_at_text_label
27669 && decl_loc_table)
27670 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27671 in_first_function_p = false;
27672 maybe_at_text_label_p = false;
27673 }
27674
27675 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27676 front-ends register a translation unit even before dwarf2out_init is
27677 called. */
27678 static tree main_translation_unit = NULL_TREE;
27679
27680 /* Hook called by front-ends after they built their main translation unit.
27681 Associate comp_unit_die to UNIT. */
27682
27683 static void
27684 dwarf2out_register_main_translation_unit (tree unit)
27685 {
27686 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27687 && main_translation_unit == NULL_TREE);
27688 main_translation_unit = unit;
27689 /* If dwarf2out_init has not been called yet, it will perform the association
27690 itself looking at main_translation_unit. */
27691 if (decl_die_table != NULL)
27692 equate_decl_number_to_die (unit, comp_unit_die ());
27693 }
27694
27695 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27696
27697 static void
27698 push_dw_line_info_entry (dw_line_info_table *table,
27699 enum dw_line_info_opcode opcode, unsigned int val)
27700 {
27701 dw_line_info_entry e;
27702 e.opcode = opcode;
27703 e.val = val;
27704 vec_safe_push (table->entries, e);
27705 }
27706
27707 /* Output a label to mark the beginning of a source code line entry
27708 and record information relating to this source line, in
27709 'line_info_table' for later output of the .debug_line section. */
27710 /* ??? The discriminator parameter ought to be unsigned. */
27711
27712 static void
27713 dwarf2out_source_line (unsigned int line, unsigned int column,
27714 const char *filename,
27715 int discriminator, bool is_stmt)
27716 {
27717 unsigned int file_num;
27718 dw_line_info_table *table;
27719 static var_loc_view lvugid;
27720
27721 if (debug_info_level < DINFO_LEVEL_TERSE)
27722 return;
27723
27724 table = cur_line_info_table;
27725
27726 if (line == 0)
27727 {
27728 if (debug_variable_location_views
27729 && output_asm_line_debug_info ()
27730 && table && !RESETTING_VIEW_P (table->view))
27731 {
27732 /* If we're using the assembler to compute view numbers, we
27733 can't issue a .loc directive for line zero, so we can't
27734 get a view number at this point. We might attempt to
27735 compute it from the previous view, or equate it to a
27736 subsequent view (though it might not be there!), but
27737 since we're omitting the line number entry, we might as
27738 well omit the view number as well. That means pretending
27739 it's a view number zero, which might very well turn out
27740 to be correct. ??? Extend the assembler so that the
27741 compiler could emit e.g. ".locview .LVU#", to output a
27742 view without changing line number information. We'd then
27743 have to count it in symviews_since_reset; when it's omitted,
27744 it doesn't count. */
27745 if (!zero_view_p)
27746 zero_view_p = BITMAP_GGC_ALLOC ();
27747 bitmap_set_bit (zero_view_p, table->view);
27748 if (flag_debug_asm)
27749 {
27750 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27751 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27752 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27753 ASM_COMMENT_START);
27754 assemble_name (asm_out_file, label);
27755 putc ('\n', asm_out_file);
27756 }
27757 table->view = ++lvugid;
27758 }
27759 return;
27760 }
27761
27762 /* The discriminator column was added in DWARF 4.  Simplify the code
27763 below by removing the discriminator if we're not supposed to output it. */
27764 if (dwarf_version < 4 && dwarf_strict)
27765 discriminator = 0;
27766
27767 if (!debug_column_info)
27768 column = 0;
27769
27770 file_num = maybe_emit_file (lookup_filename (filename));
27771
27772 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27773 the debugger has used the second (possibly duplicate) line number
27774 at the beginning of the function to mark the end of the prologue.
27775 We could eliminate any other duplicates within the function. For
27776 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27777 that second line number entry. */
27778 /* Recall that this end-of-prologue indication is *not* the same thing
27779 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27780 to which the hook corresponds, follows the last insn that was
27781 emitted by gen_prologue. What we need is to precede the first insn
27782 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27783 insn that corresponds to something the user wrote. These may be
27784 very different locations once scheduling is enabled. */
27785
27786 if (0 && file_num == table->file_num
27787 && line == table->line_num
27788 && column == table->column_num
27789 && discriminator == table->discrim_num
27790 && is_stmt == table->is_stmt)
27791 return;
27792
27793 switch_to_section (current_function_section ());
27794
27795 /* If requested, emit something human-readable. */
27796 if (flag_debug_asm)
27797 {
27798 if (debug_column_info)
27799 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27800 filename, line, column);
27801 else
27802 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27803 filename, line);
27804 }
27805
27806 if (output_asm_line_debug_info ())
27807 {
27808 /* Emit the .loc directive understood by GNU as. */
27809 /* "\t.loc %u %u %u is_stmt %u discriminator %u",
27810    file_num, line, column, is_stmt, discriminator */
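/* For instance (illustrative output only), a directive emitted here might
   read ".loc 1 42 7 is_stmt 0 discriminator 3 view .LVU5", where the
   is_stmt, discriminator and view operands are appended only when needed,
   as the code below shows.  */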
27811 fputs ("\t.loc ", asm_out_file);
27812 fprint_ul (asm_out_file, file_num);
27813 putc (' ', asm_out_file);
27814 fprint_ul (asm_out_file, line);
27815 putc (' ', asm_out_file);
27816 fprint_ul (asm_out_file, column);
27817
27818 if (is_stmt != table->is_stmt)
27819 {
27820 fputs (" is_stmt ", asm_out_file);
27821 putc (is_stmt ? '1' : '0', asm_out_file);
27822 }
27823 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27824 {
27825 gcc_assert (discriminator > 0);
27826 fputs (" discriminator ", asm_out_file);
27827 fprint_ul (asm_out_file, (unsigned long) discriminator);
27828 }
27829 if (debug_variable_location_views)
27830 {
27831 if (!RESETTING_VIEW_P (table->view))
27832 {
27833 table->symviews_since_reset++;
27834 if (table->symviews_since_reset > symview_upper_bound)
27835 symview_upper_bound = table->symviews_since_reset;
27836 /* When we're using the assembler to compute view
27837 numbers, we output symbolic labels after "view" in
27838 .loc directives, and the assembler will set them for
27839 us, so that we can refer to the view numbers in
27840 location lists. The only exceptions are when we know
27841 a view will be zero: "-0" is a forced reset, used
27842 e.g. in the beginning of functions, whereas "0" tells
27843 the assembler to check that there was a PC change
27844 since the previous view, in a way that implicitly
27845 resets the next view. */
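/* Summarizing example: the first .loc of a function typically gets
   "view -0" (forced reset); a non-forced resetting view gets "view 0",
   which also asks the assembler to verify that the PC has advanced;
   everything else gets a symbolic view such as "view .LVU42"
   (illustrative label name).  */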
27846 fputs (" view ", asm_out_file);
27847 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27848 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27849 assemble_name (asm_out_file, label);
27850 table->view = ++lvugid;
27851 }
27852 else
27853 {
27854 table->symviews_since_reset = 0;
27855 if (FORCE_RESETTING_VIEW_P (table->view))
27856 fputs (" view -0", asm_out_file);
27857 else
27858 fputs (" view 0", asm_out_file);
27859 /* Mark the present view as a zero view. Earlier debug
27860 binds may have already added its id to loclists to be
27861 emitted later, so we can't reuse the id for something
27862 else. However, it's good to know whether a view is
27863 known to be zero, because then we may be able to
27864 optimize out locviews that are all zeros, so take
27865 note of it in zero_view_p. */
27866 if (!zero_view_p)
27867 zero_view_p = BITMAP_GGC_ALLOC ();
27868 bitmap_set_bit (zero_view_p, lvugid);
27869 table->view = ++lvugid;
27870 }
27871 }
27872 putc ('\n', asm_out_file);
27873 }
27874 else
27875 {
27876 unsigned int label_num = ++line_info_label_num;
27877
27878 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27879
27880 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27881 push_dw_line_info_entry (table, LI_adv_address, label_num);
27882 else
27883 push_dw_line_info_entry (table, LI_set_address, label_num);
27884 if (debug_variable_location_views)
27885 {
27886 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27887 if (resetting)
27888 table->view = 0;
27889
27890 if (flag_debug_asm)
27891 fprintf (asm_out_file, "\t%s view %s%d\n",
27892 ASM_COMMENT_START,
27893 resetting ? "-" : "",
27894 table->view);
27895
27896 table->view++;
27897 }
27898 if (file_num != table->file_num)
27899 push_dw_line_info_entry (table, LI_set_file, file_num);
27900 if (discriminator != table->discrim_num)
27901 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27902 if (is_stmt != table->is_stmt)
27903 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27904 push_dw_line_info_entry (table, LI_set_line, line);
27905 if (debug_column_info)
27906 push_dw_line_info_entry (table, LI_set_column, column);
27907 }
27908
27909 table->file_num = file_num;
27910 table->line_num = line;
27911 table->column_num = column;
27912 table->discrim_num = discriminator;
27913 table->is_stmt = is_stmt;
27914 table->in_use = true;
27915 }
27916
27917 /* Record the beginning of a new source file. */
27918
27919 static void
27920 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27921 {
27922 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27923 {
27924 macinfo_entry e;
27925 e.code = DW_MACINFO_start_file;
27926 e.lineno = lineno;
27927 e.info = ggc_strdup (filename);
27928 vec_safe_push (macinfo_table, e);
27929 }
27930 }
27931
27932 /* Record the end of a source file. */
27933
27934 static void
27935 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27936 {
27937 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27938 {
27939 macinfo_entry e;
27940 e.code = DW_MACINFO_end_file;
27941 e.lineno = lineno;
27942 e.info = NULL;
27943 vec_safe_push (macinfo_table, e);
27944 }
27945 }
27946
27947 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27948 the tail part of the directive line, i.e. the part which is past the
27949 initial whitespace, #, whitespace, directive-name, whitespace part. */
27950
27951 static void
27952 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27953 const char *buffer ATTRIBUTE_UNUSED)
27954 {
27955 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27956 {
27957 macinfo_entry e;
27958 /* Insert a dummy first entry to be able to optimize the whole
27959 predefined macro block using DW_MACRO_import. */
27960 if (macinfo_table->is_empty () && lineno <= 1)
27961 {
27962 e.code = 0;
27963 e.lineno = 0;
27964 e.info = NULL;
27965 vec_safe_push (macinfo_table, e);
27966 }
27967 e.code = DW_MACINFO_define;
27968 e.lineno = lineno;
27969 e.info = ggc_strdup (buffer);
27970 vec_safe_push (macinfo_table, e);
27971 }
27972 }
27973
27974 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
27975 the tail part of the directive line, i.e. the part which is past the
27976 initial whitespace, #, whitespace, directive-name, whitespace part. */
27977
27978 static void
27979 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
27980 const char *buffer ATTRIBUTE_UNUSED)
27981 {
27982 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27983 {
27984 macinfo_entry e;
27985 /* Insert a dummy first entry to be able to optimize the whole
27986 predefined macro block using DW_MACRO_import. */
27987 if (macinfo_table->is_empty () && lineno <= 1)
27988 {
27989 e.code = 0;
27990 e.lineno = 0;
27991 e.info = NULL;
27992 vec_safe_push (macinfo_table, e);
27993 }
27994 e.code = DW_MACINFO_undef;
27995 e.lineno = lineno;
27996 e.info = ggc_strdup (buffer);
27997 vec_safe_push (macinfo_table, e);
27998 }
27999 }
28000
28001 /* Helpers to manipulate the hash table of macinfo entries. */
28002
28003 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28004 {
28005 static inline hashval_t hash (const macinfo_entry *);
28006 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28007 };
28008
28009 inline hashval_t
28010 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28011 {
28012 return htab_hash_string (entry->info);
28013 }
28014
28015 inline bool
28016 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28017 const macinfo_entry *entry2)
28018 {
28019 return !strcmp (entry1->info, entry2->info);
28020 }
28021
28022 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28023
28024 /* Output a single .debug_macinfo entry. */
28025
28026 static void
28027 output_macinfo_op (macinfo_entry *ref)
28028 {
28029 int file_num;
28030 size_t len;
28031 struct indirect_string_node *node;
28032 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28033 struct dwarf_file_data *fd;
28034
28035 switch (ref->code)
28036 {
28037 case DW_MACINFO_start_file:
28038 fd = lookup_filename (ref->info);
28039 file_num = maybe_emit_file (fd);
28040 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28041 dw2_asm_output_data_uleb128 (ref->lineno,
28042 "Included from line number %lu",
28043 (unsigned long) ref->lineno);
28044 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28045 break;
28046 case DW_MACINFO_end_file:
28047 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28048 break;
28049 case DW_MACINFO_define:
28050 case DW_MACINFO_undef:
28051 len = strlen (ref->info) + 1;
28052 if (!dwarf_strict
28053 && len > DWARF_OFFSET_SIZE
28054 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28055 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28056 {
28057 ref->code = ref->code == DW_MACINFO_define
28058 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28059 output_macinfo_op (ref);
28060 return;
28061 }
28062 dw2_asm_output_data (1, ref->code,
28063 ref->code == DW_MACINFO_define
28064 ? "Define macro" : "Undefine macro");
28065 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28066 (unsigned long) ref->lineno);
28067 dw2_asm_output_nstring (ref->info, -1, "The macro");
28068 break;
28069 case DW_MACRO_define_strp:
28070 case DW_MACRO_undef_strp:
28071 node = find_AT_string (ref->info);
28072 gcc_assert (node
28073 && (node->form == DW_FORM_strp
28074 || node->form == dwarf_FORM (DW_FORM_strx)));
28075 dw2_asm_output_data (1, ref->code,
28076 ref->code == DW_MACRO_define_strp
28077 ? "Define macro strp"
28078 : "Undefine macro strp");
28079 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28080 (unsigned long) ref->lineno);
28081 if (node->form == DW_FORM_strp)
28082 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28083 debug_str_section, "The macro: \"%s\"",
28084 ref->info);
28085 else
28086 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28087 ref->info);
28088 break;
28089 case DW_MACRO_import:
28090 dw2_asm_output_data (1, ref->code, "Import");
28091 ASM_GENERATE_INTERNAL_LABEL (label,
28092 DEBUG_MACRO_SECTION_LABEL,
28093 ref->lineno + macinfo_label_base);
28094 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28095 break;
28096 default:
28097 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28098 ASM_COMMENT_START, (unsigned long) ref->code);
28099 break;
28100 }
28101 }
28102
28103 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28104    other compilation units' .debug_macinfo sections.  IDX is the first
28105    index of a define/undef op; return the number of ops that should be
28106    emitted in a comdat .debug_macinfo section and emit
28107    a DW_MACRO_import entry referencing it.
28108    If the define/undef entry should be emitted normally, return 0. */
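/* Illustrative sketch (not part of the original comment): given a run of
   predefined-macro ops such as

       DW_MACINFO_define, lineno 1, "__STDC__ 1"
       DW_MACINFO_define, lineno 1, "__GNUC__ 8"
       ... further define/undef ops ...

   the primary .debug_macinfo section gets only a single DW_MACRO_import
   referencing a comdat section whose name encodes an MD5 checksum of the
   run, and the define/undef ops themselves are emitted once inside that
   comdat section, so identical runs from other compilation units can be
   merged by the linker.  The macro names above are just examples.  */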
28109
28110 static unsigned
28111 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28112 macinfo_hash_type **macinfo_htab)
28113 {
28114 macinfo_entry *first, *second, *cur, *inc;
28115 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28116 unsigned char checksum[16];
28117 struct md5_ctx ctx;
28118 char *grp_name, *tail;
28119 const char *base;
28120 unsigned int i, count, encoded_filename_len, linebuf_len;
28121 macinfo_entry **slot;
28122
28123 first = &(*macinfo_table)[idx];
28124 second = &(*macinfo_table)[idx + 1];
28125
28126 /* Optimize only if there are at least two consecutive define/undef ops,
28127 and either all of them are before first DW_MACINFO_start_file
28128 with lineno {0,1} (i.e. predefined macro block), or all of them are
28129 in some included header file. */
28130 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28131 return 0;
28132 if (vec_safe_is_empty (files))
28133 {
28134 if (first->lineno > 1 || second->lineno > 1)
28135 return 0;
28136 }
28137 else if (first->lineno == 0)
28138 return 0;
28139
28140 /* Find the last define/undef entry that can be grouped together
28141 with first and at the same time compute md5 checksum of their
28142 codes, linenumbers and strings. */
28143 md5_init_ctx (&ctx);
28144 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28145 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28146 break;
28147 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28148 break;
28149 else
28150 {
28151 unsigned char code = cur->code;
28152 md5_process_bytes (&code, 1, &ctx);
28153 checksum_uleb128 (cur->lineno, &ctx);
28154 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28155 }
28156 md5_finish_ctx (&ctx, checksum);
28157 count = i - idx;
28158
28159 /* From the containing include filename (if any) pick up just
28160 usable characters from its basename. */
28161 if (vec_safe_is_empty (files))
28162 base = "";
28163 else
28164 base = lbasename (files->last ().info);
28165 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28166 if (ISIDNUM (base[i]) || base[i] == '.')
28167 encoded_filename_len++;
28168 /* Count . at the end. */
28169 if (encoded_filename_len)
28170 encoded_filename_len++;
28171
28172 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28173 linebuf_len = strlen (linebuf);
28174
28175 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
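/* For instance (hypothetical values): a run that starts at line 1 inside
   an included header "foo.h", compiled with 4-byte DWARF offsets, yields
   a group name of the form "wm4.foo.h.1." followed by 32 hex digits of
   the MD5 checksum computed above.  */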
28176 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28177 + 16 * 2 + 1);
28178 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28179 tail = grp_name + 4;
28180 if (encoded_filename_len)
28181 {
28182 for (i = 0; base[i]; i++)
28183 if (ISIDNUM (base[i]) || base[i] == '.')
28184 *tail++ = base[i];
28185 *tail++ = '.';
28186 }
28187 memcpy (tail, linebuf, linebuf_len);
28188 tail += linebuf_len;
28189 *tail++ = '.';
28190 for (i = 0; i < 16; i++)
28191 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28192
28193 /* Construct a macinfo_entry for DW_MACRO_import
28194 in the empty vector entry before the first define/undef. */
28195 inc = &(*macinfo_table)[idx - 1];
28196 inc->code = DW_MACRO_import;
28197 inc->lineno = 0;
28198 inc->info = ggc_strdup (grp_name);
28199 if (!*macinfo_htab)
28200 *macinfo_htab = new macinfo_hash_type (10);
28201 /* Avoid emitting duplicates. */
28202 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28203 if (*slot != NULL)
28204 {
28205 inc->code = 0;
28206 inc->info = NULL;
28207 /* If such an entry has been used before, just emit
28208 a DW_MACRO_import op. */
28209 inc = *slot;
28210 output_macinfo_op (inc);
28211 /* And clear all macinfo_entry in the range to avoid emitting them
28212 in the second pass. */
28213 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28214 {
28215 cur->code = 0;
28216 cur->info = NULL;
28217 }
28218 }
28219 else
28220 {
28221 *slot = inc;
28222 inc->lineno = (*macinfo_htab)->elements ();
28223 output_macinfo_op (inc);
28224 }
28225 return count;
28226 }
28227
28228 /* Save any strings needed by the macinfo table in the debug str
28229 table. All strings must be collected into the table by the time
28230 index_string is called. */
28231
28232 static void
28233 save_macinfo_strings (void)
28234 {
28235 unsigned len;
28236 unsigned i;
28237 macinfo_entry *ref;
28238
28239 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28240 {
28241 switch (ref->code)
28242 {
28243 /* Match the logic in output_macinfo_op to decide on
28244 indirect strings. */
28245 case DW_MACINFO_define:
28246 case DW_MACINFO_undef:
28247 len = strlen (ref->info) + 1;
28248 if (!dwarf_strict
28249 && len > DWARF_OFFSET_SIZE
28250 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28251 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28252 set_indirect_string (find_AT_string (ref->info));
28253 break;
28254 case DW_MACRO_define_strp:
28255 case DW_MACRO_undef_strp:
28256 set_indirect_string (find_AT_string (ref->info));
28257 break;
28258 default:
28259 break;
28260 }
28261 }
28262 }
28263
28264 /* Output macinfo section(s). */
28265
28266 static void
28267 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28268 {
28269 unsigned i;
28270 unsigned long length = vec_safe_length (macinfo_table);
28271 macinfo_entry *ref;
28272 vec<macinfo_entry, va_gc> *files = NULL;
28273 macinfo_hash_type *macinfo_htab = NULL;
28274 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28275
28276 if (! length)
28277 return;
28278
28279 /* output_macinfo* uses these interchangeably. */
28280 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28281 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28282 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28283 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28284
28285 /* AIX Assembler inserts the length, so adjust the reference to match the
28286 offset expected by debuggers. */
28287 strcpy (dl_section_ref, debug_line_label);
28288 if (XCOFF_DEBUGGING_INFO)
28289 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28290
28291 /* For .debug_macro emit the section header. */
28292 if (!dwarf_strict || dwarf_version >= 5)
28293 {
28294 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28295 "DWARF macro version number");
28296 if (DWARF_OFFSET_SIZE == 8)
28297 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28298 else
28299 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28300 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28301 debug_line_section, NULL);
28302 }
28303
28304 /* The first loop below emits the primary .debug_macinfo section
28305    and clears each macinfo_entry after its op has been emitted.
28306    If a longer range of define/undef ops can be optimized using
28307    DW_MACRO_import, a DW_MACRO_import op is emitted and kept in
28308    the vector entry just before the first define/undef of the range,
28309    while the define/undef ops themselves are kept unemitted for the
28310    second loop below. */
28310 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28311 {
28312 switch (ref->code)
28313 {
28314 case DW_MACINFO_start_file:
28315 vec_safe_push (files, *ref);
28316 break;
28317 case DW_MACINFO_end_file:
28318 if (!vec_safe_is_empty (files))
28319 files->pop ();
28320 break;
28321 case DW_MACINFO_define:
28322 case DW_MACINFO_undef:
28323 if ((!dwarf_strict || dwarf_version >= 5)
28324 && HAVE_COMDAT_GROUP
28325 && vec_safe_length (files) != 1
28326 && i > 0
28327 && i + 1 < length
28328 && (*macinfo_table)[i - 1].code == 0)
28329 {
28330 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28331 if (count)
28332 {
28333 i += count - 1;
28334 continue;
28335 }
28336 }
28337 break;
28338 case 0:
28339 /* A dummy entry may be inserted at the beginning to be able
28340 to optimize the whole block of predefined macros. */
28341 if (i == 0)
28342 continue;
28343 default:
28344 break;
28345 }
28346 output_macinfo_op (ref);
28347 ref->info = NULL;
28348 ref->code = 0;
28349 }
28350
28351 if (!macinfo_htab)
28352 return;
28353
28354 /* Save the number of transparent includes so we can adjust the
28355 label number for the fat LTO object DWARF. */
28356 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28357
28358 delete macinfo_htab;
28359 macinfo_htab = NULL;
28360
28361 /* If any DW_MACRO_import ops were used, then at each such entry
28362    terminate the current chain, switch to a new comdat .debug_macinfo
28363    section, and emit the following define/undef entries within it. */
28364 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28365 switch (ref->code)
28366 {
28367 case 0:
28368 continue;
28369 case DW_MACRO_import:
28370 {
28371 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28372 tree comdat_key = get_identifier (ref->info);
28373 /* Terminate the previous .debug_macinfo section. */
28374 dw2_asm_output_data (1, 0, "End compilation unit");
28375 targetm.asm_out.named_section (debug_macinfo_section_name,
28376 SECTION_DEBUG
28377 | SECTION_LINKONCE
28378 | (early_lto_debug
28379 ? SECTION_EXCLUDE : 0),
28380 comdat_key);
28381 ASM_GENERATE_INTERNAL_LABEL (label,
28382 DEBUG_MACRO_SECTION_LABEL,
28383 ref->lineno + macinfo_label_base);
28384 ASM_OUTPUT_LABEL (asm_out_file, label);
28385 ref->code = 0;
28386 ref->info = NULL;
28387 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28388 "DWARF macro version number");
28389 if (DWARF_OFFSET_SIZE == 8)
28390 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28391 else
28392 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28393 }
28394 break;
28395 case DW_MACINFO_define:
28396 case DW_MACINFO_undef:
28397 output_macinfo_op (ref);
28398 ref->code = 0;
28399 ref->info = NULL;
28400 break;
28401 default:
28402 gcc_unreachable ();
28403 }
28404
28405 macinfo_label_base += macinfo_label_base_adj;
28406 }
28407
28408 /* Initialize the various sections and labels for dwarf output; use the
28409    early LTO variants of the debug sections when EARLY_LTO_DEBUG.  Returns
28410    the generation (zero-based number of times the function was called). */
28411
28412 static unsigned
28413 init_sections_and_labels (bool early_lto_debug)
28414 {
28415 /* As we may get called multiple times have a generation count for
28416 labels. */
28417 static unsigned generation = 0;
28418
28419 if (early_lto_debug)
28420 {
28421 if (!dwarf_split_debug_info)
28422 {
28423 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28424 SECTION_DEBUG | SECTION_EXCLUDE,
28425 NULL);
28426 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28427 SECTION_DEBUG | SECTION_EXCLUDE,
28428 NULL);
28429 debug_macinfo_section_name
28430 = ((dwarf_strict && dwarf_version < 5)
28431 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28432 debug_macinfo_section = get_section (debug_macinfo_section_name,
28433 SECTION_DEBUG
28434 | SECTION_EXCLUDE, NULL);
28435 }
28436 else
28437 {
28438 /* ??? Which of the following do we need early? */
28439 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28440 SECTION_DEBUG | SECTION_EXCLUDE,
28441 NULL);
28442 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28443 SECTION_DEBUG | SECTION_EXCLUDE,
28444 NULL);
28445 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28446 SECTION_DEBUG
28447 | SECTION_EXCLUDE, NULL);
28448 debug_skeleton_abbrev_section
28449 = get_section (DEBUG_LTO_ABBREV_SECTION,
28450 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28451 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28452 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28453 generation);
28454
28455 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28456 stay in the main .o, but the skeleton_line goes into the split
28457 off dwo. */
28458 debug_skeleton_line_section
28459 = get_section (DEBUG_LTO_LINE_SECTION,
28460 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28461 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28462 DEBUG_SKELETON_LINE_SECTION_LABEL,
28463 generation);
28464 debug_str_offsets_section
28465 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28466 SECTION_DEBUG | SECTION_EXCLUDE,
28467 NULL);
28468 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28469 DEBUG_SKELETON_INFO_SECTION_LABEL,
28470 generation);
28471 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28472 DEBUG_STR_DWO_SECTION_FLAGS,
28473 NULL);
28474 debug_macinfo_section_name
28475 = ((dwarf_strict && dwarf_version < 5)
28476 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28477 debug_macinfo_section = get_section (debug_macinfo_section_name,
28478 SECTION_DEBUG | SECTION_EXCLUDE,
28479 NULL);
28480 }
28481 /* For macro info and the file table we have to refer to a
28482 debug_line section. */
28483 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28484 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28485 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28486 DEBUG_LINE_SECTION_LABEL, generation);
28487
28488 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28489 DEBUG_STR_SECTION_FLAGS
28490 | SECTION_EXCLUDE, NULL);
28491 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28492 debug_line_str_section
28493 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28494 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28495 }
28496 else
28497 {
28498 if (!dwarf_split_debug_info)
28499 {
28500 debug_info_section = get_section (DEBUG_INFO_SECTION,
28501 SECTION_DEBUG, NULL);
28502 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28503 SECTION_DEBUG, NULL);
28504 debug_loc_section = get_section (dwarf_version >= 5
28505 ? DEBUG_LOCLISTS_SECTION
28506 : DEBUG_LOC_SECTION,
28507 SECTION_DEBUG, NULL);
28508 debug_macinfo_section_name
28509 = ((dwarf_strict && dwarf_version < 5)
28510 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28511 debug_macinfo_section = get_section (debug_macinfo_section_name,
28512 SECTION_DEBUG, NULL);
28513 }
28514 else
28515 {
28516 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28517 SECTION_DEBUG | SECTION_EXCLUDE,
28518 NULL);
28519 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28520 SECTION_DEBUG | SECTION_EXCLUDE,
28521 NULL);
28522 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28523 SECTION_DEBUG, NULL);
28524 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28525 SECTION_DEBUG, NULL);
28526 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28527 SECTION_DEBUG, NULL);
28528 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28529 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28530 generation);
28531
28532 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28533 stay in the main .o, but the skeleton_line goes into the
28534 split off dwo. */
28535 debug_skeleton_line_section
28536 = get_section (DEBUG_DWO_LINE_SECTION,
28537 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28538 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28539 DEBUG_SKELETON_LINE_SECTION_LABEL,
28540 generation);
28541 debug_str_offsets_section
28542 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28543 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28544 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28545 DEBUG_SKELETON_INFO_SECTION_LABEL,
28546 generation);
28547 debug_loc_section = get_section (dwarf_version >= 5
28548 ? DEBUG_DWO_LOCLISTS_SECTION
28549 : DEBUG_DWO_LOC_SECTION,
28550 SECTION_DEBUG | SECTION_EXCLUDE,
28551 NULL);
28552 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28553 DEBUG_STR_DWO_SECTION_FLAGS,
28554 NULL);
28555 debug_macinfo_section_name
28556 = ((dwarf_strict && dwarf_version < 5)
28557 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28558 debug_macinfo_section = get_section (debug_macinfo_section_name,
28559 SECTION_DEBUG | SECTION_EXCLUDE,
28560 NULL);
28561 }
28562 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28563 SECTION_DEBUG, NULL);
28564 debug_line_section = get_section (DEBUG_LINE_SECTION,
28565 SECTION_DEBUG, NULL);
28566 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28567 SECTION_DEBUG, NULL);
28568 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28569 SECTION_DEBUG, NULL);
28570 debug_str_section = get_section (DEBUG_STR_SECTION,
28571 DEBUG_STR_SECTION_FLAGS, NULL);
28572 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28573 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28574 DEBUG_STR_SECTION_FLAGS, NULL);
28575
28576 debug_ranges_section = get_section (dwarf_version >= 5
28577 ? DEBUG_RNGLISTS_SECTION
28578 : DEBUG_RANGES_SECTION,
28579 SECTION_DEBUG, NULL);
28580 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28581 SECTION_DEBUG, NULL);
28582 }
28583
28584 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28585 DEBUG_ABBREV_SECTION_LABEL, generation);
28586 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28587 DEBUG_INFO_SECTION_LABEL, generation);
28588 info_section_emitted = false;
28589 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28590 DEBUG_LINE_SECTION_LABEL, generation);
28591 /* There are up to 4 unique ranges labels per generation.
28592 See also output_rnglists. */
28593 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28594 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28595 if (dwarf_version >= 5 && dwarf_split_debug_info)
28596 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28597 DEBUG_RANGES_SECTION_LABEL,
28598 1 + generation * 4);
28599 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28600 DEBUG_ADDR_SECTION_LABEL, generation);
28601 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28602 (dwarf_strict && dwarf_version < 5)
28603 ? DEBUG_MACINFO_SECTION_LABEL
28604 : DEBUG_MACRO_SECTION_LABEL, generation);
28605 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28606 generation);
28607
28608 ++generation;
28609 return generation - 1;
28610 }
28611
28612 /* Set up for Dwarf output at the start of compilation. */
28613
28614 static void
28615 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28616 {
28617 /* Allocate the file_table. */
28618 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28619
28620 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28621 /* Allocate the decl_die_table. */
28622 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28623
28624 /* Allocate the decl_loc_table. */
28625 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28626
28627 /* Allocate the cached_dw_loc_list_table. */
28628 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28629
28630 /* Allocate the initial hunk of the abbrev_die_table. */
28631 vec_alloc (abbrev_die_table, 256);
28632 /* Zero-th entry is allocated, but unused. */
28633 abbrev_die_table->quick_push (NULL);
28634
28635 /* Allocate the dwarf_proc_stack_usage_map. */
28636 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28637
28638 /* Allocate the pubtypes and pubnames vectors. */
28639 vec_alloc (pubname_table, 32);
28640 vec_alloc (pubtype_table, 32);
28641
28642 vec_alloc (incomplete_types, 64);
28643
28644 vec_alloc (used_rtx_array, 32);
28645
28646 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28647 vec_alloc (macinfo_table, 64);
28648 #endif
28649
28650 /* If front-ends already registered a main translation unit but we were not
28651 ready to perform the association, do this now. */
28652 if (main_translation_unit != NULL_TREE)
28653 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28654 }
28655
28656 /* Called before compile () starts outputting functions, variables
28657 and toplevel asms into assembly. */
28658
28659 static void
28660 dwarf2out_assembly_start (void)
28661 {
28662 if (text_section_line_info)
28663 return;
28664
28665 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28666 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28667 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28668 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28669 COLD_TEXT_SECTION_LABEL, 0);
28670 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28671
28672 switch_to_section (text_section);
28673 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28674 #endif
28675
28676 /* Make sure the line number table for .text always exists. */
28677 text_section_line_info = new_line_info_table ();
28678 text_section_line_info->end_label = text_end_label;
28679
28680 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28681 cur_line_info_table = text_section_line_info;
28682 #endif
28683
28684 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28685 && dwarf2out_do_cfi_asm ()
28686 && !dwarf2out_do_eh_frame ())
28687 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28688 }
28689
28690 /* A helper function for dwarf2out_finish called through
28691 htab_traverse. Assign a string its index. All strings must be
28692 collected into the table by the time index_string is called,
28693 because the indexing code relies on htab_traverse to traverse nodes
28694 in the same order for each run. */
28695
28696 int
28697 index_string (indirect_string_node **h, unsigned int *index)
28698 {
28699 indirect_string_node *node = *h;
28700
28701 find_string_form (node);
28702 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28703 {
28704 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28705 node->index = *index;
28706 *index += 1;
28707 }
28708 return 1;
28709 }
28710
28711 /* A helper function for output_indirect_strings called through
28712 htab_traverse. Output the offset to a string and update the
28713 current offset. */
28714
28715 int
28716 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28717 {
28718 indirect_string_node *node = *h;
28719
28720 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28721 {
28722 /* Assert that this node has been assigned an index. */
28723 gcc_assert (node->index != NO_INDEX_ASSIGNED
28724 && node->index != NOT_INDEXED);
28725 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28726 "indexed string 0x%x: %s", node->index, node->str);
28727 *offset += strlen (node->str) + 1;
28728 }
28729 return 1;
28730 }
28731
28732 /* A helper function for dwarf2out_finish called through
28733 htab_traverse. Output the indexed string. */
28734
28735 int
28736 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28737 {
28738 struct indirect_string_node *node = *h;
28739
28740 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28741 {
28742 /* Assert that the strings are output in the same order as their
28743 indexes were assigned. */
28744 gcc_assert (*cur_idx == node->index);
28745 assemble_string (node->str, strlen (node->str) + 1);
28746 *cur_idx += 1;
28747 }
28748 return 1;
28749 }
28750
28751 /* A helper function for output_indirect_strings.  Counts the number
28752    of indexed string offsets.  Must match the logic of the functions
28753    output_index_string[_offset] above. */
28754 int
28755 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28756 {
28757 struct indirect_string_node *node = *h;
28758
28759 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28760 *last_idx += 1;
28761 return 1;
28762 }
28763
28764 /* A helper function for dwarf2out_finish called through
28765 htab_traverse. Emit one queued .debug_str string. */
28766
28767 int
28768 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28769 {
28770 struct indirect_string_node *node = *h;
28771
28772 node->form = find_string_form (node);
28773 if (node->form == form && node->refcount > 0)
28774 {
28775 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28776 assemble_string (node->str, strlen (node->str) + 1);
28777 }
28778
28779 return 1;
28780 }
28781
28782 /* Output the indexed string table. */
28783
28784 static void
28785 output_indirect_strings (void)
28786 {
28787 switch_to_section (debug_str_section);
28788 if (!dwarf_split_debug_info)
28789 debug_str_hash->traverse<enum dwarf_form,
28790 output_indirect_string> (DW_FORM_strp);
28791 else
28792 {
28793 unsigned int offset = 0;
28794 unsigned int cur_idx = 0;
28795
28796 if (skeleton_debug_str_hash)
28797 skeleton_debug_str_hash->traverse<enum dwarf_form,
28798 output_indirect_string> (DW_FORM_strp);
28799
28800 switch_to_section (debug_str_offsets_section);
28801 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28802 header.  Note that we don't need to generate a label for the
28803 actual index table following the header here, because this is
28804 for the split dwarf case only.  In a .dwo file there is only
28805 one string offsets table (and one debug info section). But
28806 if we were to start using string offset tables for the main (or
28807 skeleton) unit, then we would have to add a DW_AT_str_offsets_base
28808 pointing to the actual index after the header. Split dwarf
28809 units will never have a string offsets base attribute. When
28810 a split unit is moved into a .dwp file the string offsets can
28811 be found through the .debug_cu_index section table. */
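/* Sketch of the header emitted below, assuming 32-bit DWARF offsets:
       4-byte unit length = number of indexed strings * 4 + 4
       2-byte version     = 5
       2-byte padding     = 0
   followed by one 4-byte offset per indexed string.  */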
28812 if (dwarf_version >= 5)
28813 {
28814 unsigned int last_idx = 0;
28815 unsigned long str_offsets_length;
28816
28817 debug_str_hash->traverse_noresize
28818 <unsigned int *, count_index_strings> (&last_idx);
28819 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
28820 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28821 dw2_asm_output_data (4, 0xffffffff,
28822 "Escape value for 64-bit DWARF extension");
28823 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28824 "Length of string offsets unit");
28825 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28826 dw2_asm_output_data (2, 0, "Header zero padding");
28827 }
28828 debug_str_hash->traverse_noresize
28829 <unsigned int *, output_index_string_offset> (&offset);
28830 switch_to_section (debug_str_dwo_section);
28831 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28832 (&cur_idx);
28833 }
28834 }
28835
28836 /* Callback for htab_traverse to assign an index to an entry in the
28837 table, and to write that entry to the .debug_addr section. */
28838
28839 int
28840 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28841 {
28842 addr_table_entry *entry = *slot;
28843
28844 if (entry->refcount == 0)
28845 {
28846 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28847 || entry->index == NOT_INDEXED);
28848 return 1;
28849 }
28850
28851 gcc_assert (entry->index == *cur_index);
28852 (*cur_index)++;
28853
28854 switch (entry->kind)
28855 {
28856 case ate_kind_rtx:
28857 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28858 "0x%x", entry->index);
28859 break;
28860 case ate_kind_rtx_dtprel:
28861 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28862 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28863 DWARF2_ADDR_SIZE,
28864 entry->addr.rtl);
28865 fputc ('\n', asm_out_file);
28866 break;
28867 case ate_kind_label:
28868 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28869 "0x%x", entry->index);
28870 break;
28871 default:
28872 gcc_unreachable ();
28873 }
28874 return 1;
28875 }
28876
28877 /* A helper function for dwarf2out_finish. Counts the number
28878 of indexed addresses. Must match the logic of the function
28879 output_addr_table_entry above. */
28880 int
28881 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28882 {
28883 addr_table_entry *entry = *slot;
28884
28885 if (entry->refcount > 0)
28886 *last_idx += 1;
28887 return 1;
28888 }
28889
28890 /* Produce the .debug_addr section. */
28891
28892 static void
28893 output_addr_table (void)
28894 {
28895 unsigned int index = 0;
28896 if (addr_index_table == NULL || addr_index_table->size () == 0)
28897 return;
28898
28899 switch_to_section (debug_addr_section);
28900 addr_index_table
28901 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28902 }
28903
28904 #if ENABLE_ASSERT_CHECKING
28905 /* Verify that all marks are clear. */
28906
28907 static void
28908 verify_marks_clear (dw_die_ref die)
28909 {
28910 dw_die_ref c;
28911
28912 gcc_assert (! die->die_mark);
28913 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28914 }
28915 #endif /* ENABLE_ASSERT_CHECKING */
28916
28917 /* Clear the marks for a die and its children.
28918 Be cool if the mark isn't set. */
28919
28920 static void
28921 prune_unmark_dies (dw_die_ref die)
28922 {
28923 dw_die_ref c;
28924
28925 if (die->die_mark)
28926 die->die_mark = 0;
28927 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28928 }
28929
28930 /* Given LOC that is referenced by a DIE we're marking as used, find all
28931 DWARF procedures it references and mark them as used. */
28932
28933 static void
28934 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28935 {
28936 for (; loc != NULL; loc = loc->dw_loc_next)
28937 switch (loc->dw_loc_opc)
28938 {
28939 case DW_OP_implicit_pointer:
28940 case DW_OP_convert:
28941 case DW_OP_reinterpret:
28942 case DW_OP_GNU_implicit_pointer:
28943 case DW_OP_GNU_convert:
28944 case DW_OP_GNU_reinterpret:
28945 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28946 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28947 break;
28948 case DW_OP_GNU_variable_value:
28949 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28950 {
28951 dw_die_ref ref
28952 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28953 if (ref == NULL)
28954 break;
28955 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28956 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28957 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28958 }
28959 /* FALLTHRU */
28960 case DW_OP_call2:
28961 case DW_OP_call4:
28962 case DW_OP_call_ref:
28963 case DW_OP_const_type:
28964 case DW_OP_GNU_const_type:
28965 case DW_OP_GNU_parameter_ref:
28966 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
28967 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28968 break;
28969 case DW_OP_regval_type:
28970 case DW_OP_deref_type:
28971 case DW_OP_GNU_regval_type:
28972 case DW_OP_GNU_deref_type:
28973 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
28974 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
28975 break;
28976 case DW_OP_entry_value:
28977 case DW_OP_GNU_entry_value:
28978 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
28979 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
28980 break;
28981 default:
28982 break;
28983 }
28984 }
28985
28986 /* Given DIE that we're marking as used, find any other dies
28987 it references as attributes and mark them as used. */
28988
28989 static void
28990 prune_unused_types_walk_attribs (dw_die_ref die)
28991 {
28992 dw_attr_node *a;
28993 unsigned ix;
28994
28995 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
28996 {
28997 switch (AT_class (a))
28998 {
28999 /* Make sure DWARF procedures referenced by location descriptions will
29000 get emitted. */
29001 case dw_val_class_loc:
29002 prune_unused_types_walk_loc_descr (AT_loc (a));
29003 break;
29004 case dw_val_class_loc_list:
29005 for (dw_loc_list_ref list = AT_loc_list (a);
29006 list != NULL;
29007 list = list->dw_loc_next)
29008 prune_unused_types_walk_loc_descr (list->expr);
29009 break;
29010
29011 case dw_val_class_view_list:
29012 /* This points to a loc_list in another attribute, so it's
29013 already covered. */
29014 break;
29015
29016 case dw_val_class_die_ref:
29017 /* A reference to another DIE.
29018 Make sure that it will get emitted.
29019 If it was broken out into a comdat group, don't follow it. */
29020 if (! AT_ref (a)->comdat_type_p
29021 || a->dw_attr == DW_AT_specification)
29022 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29023 break;
29024
29025 case dw_val_class_str:
29026 /* Set the string's refcount to 0 so that prune_unused_types_update_strings
29027 accounts properly for it. */
29028 a->dw_attr_val.v.val_str->refcount = 0;
29029 break;
29030
29031 default:
29032 break;
29033 }
29034 }
29035 }
29036
29037 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
29038
29039 static void
29040 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29041 {
29042 dw_die_ref c;
29043
29044 if (die == NULL || die->die_child == NULL)
29045 return;
29046 c = die->die_child;
29047 do
29048 {
29049 if (is_template_parameter (c))
29050 prune_unused_types_mark (c, 1);
29051 c = c->die_sib;
29052 } while (c && c != die->die_child);
29053 }
29054
29055 /* Mark DIE as being used. If DOKIDS is true, then walk down
29056 to DIE's children. */
29057
29058 static void
29059 prune_unused_types_mark (dw_die_ref die, int dokids)
29060 {
29061 dw_die_ref c;
29062
29063 if (die->die_mark == 0)
29064 {
29065 /* We haven't done this node yet. Mark it as used. */
29066 die->die_mark = 1;
29067 /* If this is the DIE of a generic type instantiation,
29068 mark the children DIEs that describe its generic parms and
29069 args. */
29070 prune_unused_types_mark_generic_parms_dies (die);
29071
29072 /* We also have to mark its parents as used.
29073 (But we don't want to mark our parent's kids due to this,
29074 unless it is a class.) */
29075 if (die->die_parent)
29076 prune_unused_types_mark (die->die_parent,
29077 class_scope_p (die->die_parent));
29078
29079 /* Mark any referenced nodes. */
29080 prune_unused_types_walk_attribs (die);
29081
29082 /* If this node is a specification,
29083 also mark the definition, if it exists. */
29084 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29085 prune_unused_types_mark (die->die_definition, 1);
29086 }
29087
29088 if (dokids && die->die_mark != 2)
29089 {
29090 /* We need to walk the children, but haven't done so yet.
29091 Remember that we've walked the kids. */
29092 die->die_mark = 2;
29093
29094 /* If this is an array type, we need to make sure our
29095 kids get marked, even if they're types. If we're
29096 breaking out types into comdat sections, do this
29097 for all type definitions. */
29098 if (die->die_tag == DW_TAG_array_type
29099 || (use_debug_types
29100 && is_type_die (die) && ! is_declaration_die (die)))
29101 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29102 else
29103 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29104 }
29105 }
29106
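/* die_mark is used as a small per-DIE state here and in the walk/prune
   routines below:
     0 - the DIE has not been visited yet,
     1 - the DIE is marked as used, but its children have not been walked,
     2 - the DIE is marked and its children have been walked as well.
   prune_unused_types_prune asserts a nonzero mark, and prune_unmark_dies
   resets everything back to 0 afterwards.  */
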
29107 /* For local classes, check whether any static member functions were emitted
29108 and, if so, mark them. */
29109
29110 static void
29111 prune_unused_types_walk_local_classes (dw_die_ref die)
29112 {
29113 dw_die_ref c;
29114
29115 if (die->die_mark == 2)
29116 return;
29117
29118 switch (die->die_tag)
29119 {
29120 case DW_TAG_structure_type:
29121 case DW_TAG_union_type:
29122 case DW_TAG_class_type:
29123 break;
29124
29125 case DW_TAG_subprogram:
29126 if (!get_AT_flag (die, DW_AT_declaration)
29127 || die->die_definition != NULL)
29128 prune_unused_types_mark (die, 1);
29129 return;
29130
29131 default:
29132 return;
29133 }
29134
29135 /* Mark children. */
29136 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29137 }
29138
29139 /* Walk the tree DIE and mark types that we actually use. */
29140
29141 static void
29142 prune_unused_types_walk (dw_die_ref die)
29143 {
29144 dw_die_ref c;
29145
29146 /* Don't do anything if this node is already marked and
29147 children have been marked as well. */
29148 if (die->die_mark == 2)
29149 return;
29150
29151 switch (die->die_tag)
29152 {
29153 case DW_TAG_structure_type:
29154 case DW_TAG_union_type:
29155 case DW_TAG_class_type:
29156 if (die->die_perennial_p)
29157 break;
29158
29159 for (c = die->die_parent; c; c = c->die_parent)
29160 if (c->die_tag == DW_TAG_subprogram)
29161 break;
29162
29163 /* Finding used static member functions inside of classes
29164 is needed just for local classes, because for other classes
29165 static member function DIEs with DW_AT_specification
29166 are emitted outside of the DW_TAG_*_type. If we ever change
29167 it, we'd need to call this even for non-local classes. */
29168 if (c)
29169 prune_unused_types_walk_local_classes (die);
29170
29171 /* It's a type node --- don't mark it. */
29172 return;
29173
29174 case DW_TAG_const_type:
29175 case DW_TAG_packed_type:
29176 case DW_TAG_pointer_type:
29177 case DW_TAG_reference_type:
29178 case DW_TAG_rvalue_reference_type:
29179 case DW_TAG_volatile_type:
29180 case DW_TAG_typedef:
29181 case DW_TAG_array_type:
29182 case DW_TAG_interface_type:
29183 case DW_TAG_friend:
29184 case DW_TAG_enumeration_type:
29185 case DW_TAG_subroutine_type:
29186 case DW_TAG_string_type:
29187 case DW_TAG_set_type:
29188 case DW_TAG_subrange_type:
29189 case DW_TAG_ptr_to_member_type:
29190 case DW_TAG_file_type:
29191 /* Type nodes are useful only when other DIEs reference them --- don't
29192 mark them. */
29193 /* FALLTHROUGH */
29194
29195 case DW_TAG_dwarf_procedure:
29196 /* Likewise for DWARF procedures. */
29197
29198 if (die->die_perennial_p)
29199 break;
29200
29201 return;
29202
29203 default:
29204 /* Mark everything else. */
29205 break;
29206 }
29207
29208 if (die->die_mark == 0)
29209 {
29210 die->die_mark = 1;
29211
29212 /* Now, mark any dies referenced from here. */
29213 prune_unused_types_walk_attribs (die);
29214 }
29215
29216 die->die_mark = 2;
29217
29218 /* Mark children. */
29219 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29220 }
29221
29222 /* Increment the string counts on strings referred to from DIE's
29223 attributes. */
29224
29225 static void
29226 prune_unused_types_update_strings (dw_die_ref die)
29227 {
29228 dw_attr_node *a;
29229 unsigned ix;
29230
29231 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29232 if (AT_class (a) == dw_val_class_str)
29233 {
29234 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29235 s->refcount++;
29236 /* Avoid unnecessarily putting strings that are used less than
29237 twice in the hash table. */
29238 if (s->refcount
29239 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29240 {
29241 indirect_string_node **slot
29242 = debug_str_hash->find_slot_with_hash (s->str,
29243 htab_hash_string (s->str),
29244 INSERT);
29245 gcc_assert (*slot == NULL);
29246 *slot = s;
29247 }
29248 }
29249 }
29250
29251 /* Mark DIE and its children as removed. */
29252
29253 static void
29254 mark_removed (dw_die_ref die)
29255 {
29256 dw_die_ref c;
29257 die->removed = true;
29258 FOR_EACH_CHILD (die, c, mark_removed (c));
29259 }
29260
29261 /* Remove from the tree DIE any dies that aren't marked. */
29262
29263 static void
29264 prune_unused_types_prune (dw_die_ref die)
29265 {
29266 dw_die_ref c;
29267
29268 gcc_assert (die->die_mark);
29269 prune_unused_types_update_strings (die);
29270
29271 if (! die->die_child)
29272 return;
29273
29274 c = die->die_child;
29275 do {
29276 dw_die_ref prev = c, next;
29277 for (c = c->die_sib; ! c->die_mark; c = next)
29278 if (c == die->die_child)
29279 {
29280 /* No marked children between 'prev' and the end of the list. */
29281 if (prev == c)
29282 /* No marked children at all. */
29283 die->die_child = NULL;
29284 else
29285 {
29286 prev->die_sib = c->die_sib;
29287 die->die_child = prev;
29288 }
29289 c->die_sib = NULL;
29290 mark_removed (c);
29291 return;
29292 }
29293 else
29294 {
29295 next = c->die_sib;
29296 c->die_sib = NULL;
29297 mark_removed (c);
29298 }
29299
29300 if (c != prev->die_sib)
29301 prev->die_sib = c;
29302 prune_unused_types_prune (c);
29303 } while (c != die->die_child);
29304 }
29305
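/* Illustration: if a parent's (circular) child list is A, B, C and only B
   carries a mark, the loop above unlinks A and C, flags them via
   mark_removed, leaves B as the sole child and then recurses into B.  */
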
29306 /* Remove dies representing declarations that we never use. */
29307
29308 static void
29309 prune_unused_types (void)
29310 {
29311 unsigned int i;
29312 limbo_die_node *node;
29313 comdat_type_node *ctnode;
29314 pubname_entry *pub;
29315 dw_die_ref base_type;
29316
29317 #if ENABLE_ASSERT_CHECKING
29318 /* All the marks should already be clear. */
29319 verify_marks_clear (comp_unit_die ());
29320 for (node = limbo_die_list; node; node = node->next)
29321 verify_marks_clear (node->die);
29322 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29323 verify_marks_clear (ctnode->root_die);
29324 #endif /* ENABLE_ASSERT_CHECKING */
29325
29326 /* Mark types that are used in global variables. */
29327 premark_types_used_by_global_vars ();
29328
29329 /* Set the mark on nodes that are actually used. */
29330 prune_unused_types_walk (comp_unit_die ());
29331 for (node = limbo_die_list; node; node = node->next)
29332 prune_unused_types_walk (node->die);
29333 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29334 {
29335 prune_unused_types_walk (ctnode->root_die);
29336 prune_unused_types_mark (ctnode->type_die, 1);
29337 }
29338
29339 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29340 are unusual in that they are pubnames that are the children of pubtypes.
29341 They should only be marked via their parent DW_TAG_enumeration_type die,
29342 not as roots in themselves. */
29343 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29344 if (pub->die->die_tag != DW_TAG_enumerator)
29345 prune_unused_types_mark (pub->die, 1);
29346 for (i = 0; base_types.iterate (i, &base_type); i++)
29347 prune_unused_types_mark (base_type, 1);
29348
29349 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29350 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29351 callees). */
29352 cgraph_node *cnode;
29353 FOR_EACH_FUNCTION (cnode)
29354 if (cnode->referred_to_p (false))
29355 {
29356 dw_die_ref die = lookup_decl_die (cnode->decl);
29357 if (die == NULL || die->die_mark)
29358 continue;
29359 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29360 if (e->caller != cnode
29361 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29362 {
29363 prune_unused_types_mark (die, 1);
29364 break;
29365 }
29366 }
29367
29368 if (debug_str_hash)
29369 debug_str_hash->empty ();
29370 if (skeleton_debug_str_hash)
29371 skeleton_debug_str_hash->empty ();
29372 prune_unused_types_prune (comp_unit_die ());
29373 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29374 {
29375 node = *pnode;
29376 if (!node->die->die_mark)
29377 *pnode = node->next;
29378 else
29379 {
29380 prune_unused_types_prune (node->die);
29381 pnode = &node->next;
29382 }
29383 }
29384 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29385 prune_unused_types_prune (ctnode->root_die);
29386
29387 /* Leave the marks clear. */
29388 prune_unmark_dies (comp_unit_die ());
29389 for (node = limbo_die_list; node; node = node->next)
29390 prune_unmark_dies (node->die);
29391 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29392 prune_unmark_dies (ctnode->root_die);
29393 }
29394
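/* The overall shape of the pass, in short: premark types used by global
   variables, mark everything reachable from the compilation unit, limbo
   and comdat roots (plus pubnames, collected base types and potential
   call-site targets), prune whatever stayed unmarked, and finally clear
   the marks again so later passes start from a clean state.  */
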
29395 /* Helpers to manipulate hash table of comdat type units. */
29396
29397 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29398 {
29399 static inline hashval_t hash (const comdat_type_node *);
29400 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29401 };
29402
29403 inline hashval_t
29404 comdat_type_hasher::hash (const comdat_type_node *type_node)
29405 {
29406 hashval_t h;
29407 memcpy (&h, type_node->signature, sizeof (h));
29408 return h;
29409 }
29410
29411 inline bool
29412 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29413 const comdat_type_node *type_node_2)
29414 {
29415 return (! memcmp (type_node_1->signature, type_node_2->signature,
29416 DWARF_TYPE_SIGNATURE_SIZE));
29417 }
29418
29419 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29420 to the location where it would have been added had we known its
29421 DECL_ASSEMBLER_NAME when we added the other attributes. This will
29422 probably improve the compactness of debug info by removing equivalent
29423 abbrevs, and hide any differences caused by deferring the
29424 computation of the assembler name, triggered by e.g. PCH. */
29425
29426 static inline void
29427 move_linkage_attr (dw_die_ref die)
29428 {
29429 unsigned ix = vec_safe_length (die->die_attr);
29430 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29431
29432 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29433 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29434
29435 while (--ix > 0)
29436 {
29437 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29438
29439 if (prev->dw_attr == DW_AT_decl_line
29440 || prev->dw_attr == DW_AT_decl_column
29441 || prev->dw_attr == DW_AT_name)
29442 break;
29443 }
29444
29445 if (ix != vec_safe_length (die->die_attr) - 1)
29446 {
29447 die->die_attr->pop ();
29448 die->die_attr->quick_insert (ix, linkage);
29449 }
29450 }
29451
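/* For example, if the attribute vector currently is

     DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_type,
     DW_AT_linkage_name

   the linkage name is reinserted right after DW_AT_decl_line, giving the
   order it would have had if it had been added together with the name and
   source coordinates.  */
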
29452 /* Helper function for resolve_addr: mark DW_TAG_base_type nodes
29453 referenced from typed stack ops and count how often they are used. */
29454
29455 static void
29456 mark_base_types (dw_loc_descr_ref loc)
29457 {
29458 dw_die_ref base_type = NULL;
29459
29460 for (; loc; loc = loc->dw_loc_next)
29461 {
29462 switch (loc->dw_loc_opc)
29463 {
29464 case DW_OP_regval_type:
29465 case DW_OP_deref_type:
29466 case DW_OP_GNU_regval_type:
29467 case DW_OP_GNU_deref_type:
29468 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29469 break;
29470 case DW_OP_convert:
29471 case DW_OP_reinterpret:
29472 case DW_OP_GNU_convert:
29473 case DW_OP_GNU_reinterpret:
29474 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29475 continue;
29476 /* FALLTHRU */
29477 case DW_OP_const_type:
29478 case DW_OP_GNU_const_type:
29479 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29480 break;
29481 case DW_OP_entry_value:
29482 case DW_OP_GNU_entry_value:
29483 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29484 continue;
29485 default:
29486 continue;
29487 }
29488 gcc_assert (base_type->die_parent == comp_unit_die ());
29489 if (base_type->die_mark)
29490 base_type->die_mark++;
29491 else
29492 {
29493 base_types.safe_push (base_type);
29494 base_type->die_mark = 1;
29495 }
29496 }
29497 }
29498
29499 /* Comparison function for sorting marked base types. */
29500
29501 static int
29502 base_type_cmp (const void *x, const void *y)
29503 {
29504 dw_die_ref dx = *(const dw_die_ref *) x;
29505 dw_die_ref dy = *(const dw_die_ref *) y;
29506 unsigned int byte_size1, byte_size2;
29507 unsigned int encoding1, encoding2;
29508 unsigned int align1, align2;
29509 if (dx->die_mark > dy->die_mark)
29510 return -1;
29511 if (dx->die_mark < dy->die_mark)
29512 return 1;
29513 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29514 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29515 if (byte_size1 < byte_size2)
29516 return 1;
29517 if (byte_size1 > byte_size2)
29518 return -1;
29519 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29520 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29521 if (encoding1 < encoding2)
29522 return 1;
29523 if (encoding1 > encoding2)
29524 return -1;
29525 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29526 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29527 if (align1 < align2)
29528 return 1;
29529 if (align1 > align2)
29530 return -1;
29531 return 0;
29532 }
29533
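/* Example of the resulting order: a 4-byte DW_ATE_signed base type used
   ten times sorts before an 8-byte DW_ATE_float one used three times
   (the usage count, kept in die_mark, wins), while types with equal
   counts are ordered by decreasing byte size, then encoding, then
   alignment, so the order is deterministic.  */
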
29534 /* Move base types marked by mark_base_types as early as possible
29535 in the CU, sorted by decreasing usage count, both to make the
29536 uleb128 references as small as possible and to make sure they
29537 will have die_offset already computed by calc_die_sizes when
29538 the sizes of typed stack loc ops are computed. */
29539
29540 static void
29541 move_marked_base_types (void)
29542 {
29543 unsigned int i;
29544 dw_die_ref base_type, die, c;
29545
29546 if (base_types.is_empty ())
29547 return;
29548
29549 /* Sort by decreasing usage count, they will be added again in that
29550 order later on. */
29551 base_types.qsort (base_type_cmp);
29552 die = comp_unit_die ();
29553 c = die->die_child;
29554 do
29555 {
29556 dw_die_ref prev = c;
29557 c = c->die_sib;
29558 while (c->die_mark)
29559 {
29560 remove_child_with_prev (c, prev);
29561 /* Since base types were marked, there must be at least
29562 one node other than DW_TAG_base_type. */
29563 gcc_assert (die->die_child != NULL);
29564 c = prev->die_sib;
29565 }
29566 }
29567 while (c != die->die_child);
29568 gcc_assert (die->die_child);
29569 c = die->die_child;
29570 for (i = 0; base_types.iterate (i, &base_type); i++)
29571 {
29572 base_type->die_mark = 0;
29573 base_type->die_sib = c->die_sib;
29574 c->die_sib = base_type;
29575 c = base_type;
29576 }
29577 }
29578
29579 /* Helper function for resolve_addr: attempt to resolve
29580 one CONST_STRING and return true if successful. Similarly, verify that
29581 SYMBOL_REFs refer to variables emitted in the current CU. */
29582
29583 static bool
29584 resolve_one_addr (rtx *addr)
29585 {
29586 rtx rtl = *addr;
29587
29588 if (GET_CODE (rtl) == CONST_STRING)
29589 {
29590 size_t len = strlen (XSTR (rtl, 0)) + 1;
29591 tree t = build_string (len, XSTR (rtl, 0));
29592 tree tlen = size_int (len - 1);
29593 TREE_TYPE (t)
29594 = build_array_type (char_type_node, build_index_type (tlen));
29595 rtl = lookup_constant_def (t);
29596 if (!rtl || !MEM_P (rtl))
29597 return false;
29598 rtl = XEXP (rtl, 0);
29599 if (GET_CODE (rtl) == SYMBOL_REF
29600 && SYMBOL_REF_DECL (rtl)
29601 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29602 return false;
29603 vec_safe_push (used_rtx_array, rtl);
29604 *addr = rtl;
29605 return true;
29606 }
29607
29608 if (GET_CODE (rtl) == SYMBOL_REF
29609 && SYMBOL_REF_DECL (rtl))
29610 {
29611 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29612 {
29613 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29614 return false;
29615 }
29616 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29617 return false;
29618 }
29619
29620 if (GET_CODE (rtl) == CONST)
29621 {
29622 subrtx_ptr_iterator::array_type array;
29623 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29624 if (!resolve_one_addr (*iter))
29625 return false;
29626 }
29627
29628 return true;
29629 }
29630
29631 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29632 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29633 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29634
29635 static rtx
29636 string_cst_pool_decl (tree t)
29637 {
29638 rtx rtl = output_constant_def (t, 1);
29639 unsigned char *array;
29640 dw_loc_descr_ref l;
29641 tree decl;
29642 size_t len;
29643 dw_die_ref ref;
29644
29645 if (!rtl || !MEM_P (rtl))
29646 return NULL_RTX;
29647 rtl = XEXP (rtl, 0);
29648 if (GET_CODE (rtl) != SYMBOL_REF
29649 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29650 return NULL_RTX;
29651
29652 decl = SYMBOL_REF_DECL (rtl);
29653 if (!lookup_decl_die (decl))
29654 {
29655 len = TREE_STRING_LENGTH (t);
29656 vec_safe_push (used_rtx_array, rtl);
29657 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29658 array = ggc_vec_alloc<unsigned char> (len);
29659 memcpy (array, TREE_STRING_POINTER (t), len);
29660 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29661 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29662 l->dw_loc_oprnd2.v.val_vec.length = len;
29663 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29664 l->dw_loc_oprnd2.v.val_vec.array = array;
29665 add_AT_loc (ref, DW_AT_location, l);
29666 equate_decl_number_to_die (decl, ref);
29667 }
29668 return rtl;
29669 }
29670
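/* The DIE created above looks roughly like

     DW_TAG_dwarf_procedure
       DW_AT_location: DW_OP_implicit_value <length> <string bytes>

   so that a later DW_OP_implicit_pointer can refer to the string contents
   even when no address of a constant pool entry can be used.  */
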
29671 /* Helper function of resolve_addr_in_expr. LOC is
29672 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29673 of the exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29674 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29675 with DW_OP_implicit_pointer if possible and return true;
29676 if unsuccessful, return false. */
29677
29678 static bool
29679 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29680 {
29681 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29682 HOST_WIDE_INT offset = 0;
29683 dw_die_ref ref = NULL;
29684 tree decl;
29685
29686 if (GET_CODE (rtl) == CONST
29687 && GET_CODE (XEXP (rtl, 0)) == PLUS
29688 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29689 {
29690 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29691 rtl = XEXP (XEXP (rtl, 0), 0);
29692 }
29693 if (GET_CODE (rtl) == CONST_STRING)
29694 {
29695 size_t len = strlen (XSTR (rtl, 0)) + 1;
29696 tree t = build_string (len, XSTR (rtl, 0));
29697 tree tlen = size_int (len - 1);
29698
29699 TREE_TYPE (t)
29700 = build_array_type (char_type_node, build_index_type (tlen));
29701 rtl = string_cst_pool_decl (t);
29702 if (!rtl)
29703 return false;
29704 }
29705 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29706 {
29707 decl = SYMBOL_REF_DECL (rtl);
29708 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29709 {
29710 ref = lookup_decl_die (decl);
29711 if (ref && (get_AT (ref, DW_AT_location)
29712 || get_AT (ref, DW_AT_const_value)))
29713 {
29714 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29715 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29716 loc->dw_loc_oprnd1.val_entry = NULL;
29717 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29718 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29719 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29720 loc->dw_loc_oprnd2.v.val_int = offset;
29721 return true;
29722 }
29723 }
29724 }
29725 return false;
29726 }
29727
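/* Sketch of the rewrite, for a hypothetical static variable VAR whose
   address could not be resolved:

     DW_OP_addr <VAR + 16>; DW_OP_stack_value

   becomes

     DW_OP_implicit_pointer <DIE of VAR> 16

   provided VAR's DIE carries DW_AT_location or DW_AT_const_value.  */
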
29728 /* Helper function for resolve_addr: handle one location
29729 expression and return false if at least one CONST_STRING or SYMBOL_REF in
29730 the location expression couldn't be resolved. */
29731
29732 static bool
29733 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29734 {
29735 dw_loc_descr_ref keep = NULL;
29736 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29737 switch (loc->dw_loc_opc)
29738 {
29739 case DW_OP_addr:
29740 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29741 {
29742 if ((prev == NULL
29743 || prev->dw_loc_opc == DW_OP_piece
29744 || prev->dw_loc_opc == DW_OP_bit_piece)
29745 && loc->dw_loc_next
29746 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29747 && (!dwarf_strict || dwarf_version >= 5)
29748 && optimize_one_addr_into_implicit_ptr (loc))
29749 break;
29750 return false;
29751 }
29752 break;
29753 case DW_OP_GNU_addr_index:
29754 case DW_OP_addrx:
29755 case DW_OP_GNU_const_index:
29756 case DW_OP_constx:
29757 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29758 || loc->dw_loc_opc == DW_OP_addrx)
29759 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29760 || loc->dw_loc_opc == DW_OP_constx)
29761 && loc->dtprel))
29762 {
29763 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29764 if (!resolve_one_addr (&rtl))
29765 return false;
29766 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29767 loc->dw_loc_oprnd1.val_entry
29768 = add_addr_table_entry (rtl, ate_kind_rtx);
29769 }
29770 break;
29771 case DW_OP_const4u:
29772 case DW_OP_const8u:
29773 if (loc->dtprel
29774 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29775 return false;
29776 break;
29777 case DW_OP_plus_uconst:
29778 if (size_of_loc_descr (loc)
29779 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29780 + 1
29781 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29782 {
29783 dw_loc_descr_ref repl
29784 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29785 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29786 add_loc_descr (&repl, loc->dw_loc_next);
29787 *loc = *repl;
29788 }
29789 break;
29790 case DW_OP_implicit_value:
29791 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29792 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29793 return false;
29794 break;
29795 case DW_OP_implicit_pointer:
29796 case DW_OP_GNU_implicit_pointer:
29797 case DW_OP_GNU_parameter_ref:
29798 case DW_OP_GNU_variable_value:
29799 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29800 {
29801 dw_die_ref ref
29802 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29803 if (ref == NULL)
29804 return false;
29805 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29806 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29807 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29808 }
29809 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29810 {
29811 if (prev == NULL
29812 && loc->dw_loc_next == NULL
29813 && AT_class (a) == dw_val_class_loc)
29814 switch (a->dw_attr)
29815 {
29816 /* The following attributes allow both exprloc and reference forms,
29817 so if the whole expression is DW_OP_GNU_variable_value
29818 alone we can transform it into a reference. */
29819 case DW_AT_byte_size:
29820 case DW_AT_bit_size:
29821 case DW_AT_lower_bound:
29822 case DW_AT_upper_bound:
29823 case DW_AT_bit_stride:
29824 case DW_AT_count:
29825 case DW_AT_allocated:
29826 case DW_AT_associated:
29827 case DW_AT_byte_stride:
29828 a->dw_attr_val.val_class = dw_val_class_die_ref;
29829 a->dw_attr_val.val_entry = NULL;
29830 a->dw_attr_val.v.val_die_ref.die
29831 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29832 a->dw_attr_val.v.val_die_ref.external = 0;
29833 return true;
29834 default:
29835 break;
29836 }
29837 if (dwarf_strict)
29838 return false;
29839 }
29840 break;
29841 case DW_OP_const_type:
29842 case DW_OP_regval_type:
29843 case DW_OP_deref_type:
29844 case DW_OP_convert:
29845 case DW_OP_reinterpret:
29846 case DW_OP_GNU_const_type:
29847 case DW_OP_GNU_regval_type:
29848 case DW_OP_GNU_deref_type:
29849 case DW_OP_GNU_convert:
29850 case DW_OP_GNU_reinterpret:
29851 while (loc->dw_loc_next
29852 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29853 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29854 {
29855 dw_die_ref base1, base2;
29856 unsigned enc1, enc2, size1, size2;
29857 if (loc->dw_loc_opc == DW_OP_regval_type
29858 || loc->dw_loc_opc == DW_OP_deref_type
29859 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29860 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29861 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29862 else if (loc->dw_loc_oprnd1.val_class
29863 == dw_val_class_unsigned_const)
29864 break;
29865 else
29866 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29867 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29868 == dw_val_class_unsigned_const)
29869 break;
29870 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29871 gcc_assert (base1->die_tag == DW_TAG_base_type
29872 && base2->die_tag == DW_TAG_base_type);
29873 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29874 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29875 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29876 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29877 if (size1 == size2
29878 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29879 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29880 && loc != keep)
29881 || enc1 == enc2))
29882 {
29883 /* Optimize away next DW_OP_convert after
29884 adjusting LOC's base type die reference. */
29885 if (loc->dw_loc_opc == DW_OP_regval_type
29886 || loc->dw_loc_opc == DW_OP_deref_type
29887 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29888 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29889 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29890 else
29891 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29892 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29893 continue;
29894 }
29895 /* Don't change integer DW_OP_convert after e.g. floating
29896 point typed stack entry. */
29897 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29898 keep = loc->dw_loc_next;
29899 break;
29900 }
29901 break;
29902 default:
29903 break;
29904 }
29905 return true;
29906 }
29907
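/* One of the rewrites performed above, sketched: a pair of consecutive
   integer casts of the same size, e.g.

     DW_OP_convert <4-byte DW_ATE_signed base type>
     DW_OP_convert <4-byte DW_ATE_unsigned base type>

   is collapsed into a single DW_OP_convert to the second base type; the
   KEEP bookkeeping above prevents doing this right after a non-integer
   (e.g. floating point) typed stack entry, where the intermediate cast
   is significant.  */
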
29908 /* Helper function of resolve_addr. DIE had a DW_AT_location of
29909 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand,
29910 and the DW_OP_addr couldn't be resolved. resolve_addr has already
29911 removed the DW_AT_location attribute. This function attempts to
29912 add a new DW_AT_location attribute with DW_OP_implicit_pointer,
29913 or a DW_AT_const_value attribute, to it if possible. */
29914
29915 static void
29916 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29917 {
29918 if (!VAR_P (decl)
29919 || lookup_decl_die (decl) != die
29920 || DECL_EXTERNAL (decl)
29921 || !TREE_STATIC (decl)
29922 || DECL_INITIAL (decl) == NULL_TREE
29923 || DECL_P (DECL_INITIAL (decl))
29924 || get_AT (die, DW_AT_const_value))
29925 return;
29926
29927 tree init = DECL_INITIAL (decl);
29928 HOST_WIDE_INT offset = 0;
29929 /* For variables that have been optimized away and thus
29930 don't have a memory location, see if we can emit
29931 DW_AT_const_value instead. */
29932 if (tree_add_const_value_attribute (die, init))
29933 return;
29934 if (dwarf_strict && dwarf_version < 5)
29935 return;
29936 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29937 and ADDR_EXPR refers to a decl that has DW_AT_location or
29938 DW_AT_const_value (but isn't addressable, otherwise
29939 resolving the original DW_OP_addr wouldn't fail), see if
29940 we can add DW_OP_implicit_pointer. */
29941 STRIP_NOPS (init);
29942 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29943 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29944 {
29945 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29946 init = TREE_OPERAND (init, 0);
29947 STRIP_NOPS (init);
29948 }
29949 if (TREE_CODE (init) != ADDR_EXPR)
29950 return;
29951 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29952 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29953 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29954 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29955 && TREE_OPERAND (init, 0) != decl))
29956 {
29957 dw_die_ref ref;
29958 dw_loc_descr_ref l;
29959
29960 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29961 {
29962 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29963 if (!rtl)
29964 return;
29965 decl = SYMBOL_REF_DECL (rtl);
29966 }
29967 else
29968 decl = TREE_OPERAND (init, 0);
29969 ref = lookup_decl_die (decl);
29970 if (ref == NULL
29971 || (!get_AT (ref, DW_AT_location)
29972 && !get_AT (ref, DW_AT_const_value)))
29973 return;
29974 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
29975 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29976 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
29977 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29978 add_AT_loc (die, DW_AT_location, l);
29979 }
29980 }
29981
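/* A sketch of the case handled at the end, for a hypothetical pointer

     static int *p = &x;

   that was optimized away: if X's DIE still carries DW_AT_location or
   DW_AT_const_value, P's DIE receives

     DW_AT_location: DW_OP_implicit_pointer <DIE of x> 0

   instead of being left without any location at all.  */
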
29982 /* Return NULL if L is a DWARF expression, or else the first op that is
29983 not a valid DWARF expression operation. */
29984
29985 static dw_loc_descr_ref
29986 non_dwarf_expression (dw_loc_descr_ref l)
29987 {
29988 while (l)
29989 {
29990 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29991 return l;
29992 switch (l->dw_loc_opc)
29993 {
29994 case DW_OP_regx:
29995 case DW_OP_implicit_value:
29996 case DW_OP_stack_value:
29997 case DW_OP_implicit_pointer:
29998 case DW_OP_GNU_implicit_pointer:
29999 case DW_OP_GNU_parameter_ref:
30000 case DW_OP_piece:
30001 case DW_OP_bit_piece:
30002 return l;
30003 default:
30004 break;
30005 }
30006 l = l->dw_loc_next;
30007 }
30008 return NULL;
30009 }
30010
30011 /* Return an adjusted copy of EXPR:
30012 If it is an empty DWARF expression, return it.
30013 If it is a valid non-empty DWARF expression,
30014 return a copy of EXPR with DW_OP_deref appended to it.
30015 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
30016 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30017 If it is a DWARF expression followed by DW_OP_stack_value, return a
30018 copy of the DWARF expression without anything appended.
30019 Otherwise, return NULL. */
30020
30021 static dw_loc_descr_ref
30022 copy_deref_exprloc (dw_loc_descr_ref expr)
30023 {
30024 dw_loc_descr_ref tail = NULL;
30025
30026 if (expr == NULL)
30027 return NULL;
30028
30029 dw_loc_descr_ref l = non_dwarf_expression (expr);
30030 if (l && l->dw_loc_next)
30031 return NULL;
30032
30033 if (l)
30034 {
30035 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30036 tail = new_loc_descr ((enum dwarf_location_atom)
30037 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30038 0, 0);
30039 else
30040 switch (l->dw_loc_opc)
30041 {
30042 case DW_OP_regx:
30043 tail = new_loc_descr (DW_OP_bregx,
30044 l->dw_loc_oprnd1.v.val_unsigned, 0);
30045 break;
30046 case DW_OP_stack_value:
30047 break;
30048 default:
30049 return NULL;
30050 }
30051 }
30052 else
30053 tail = new_loc_descr (DW_OP_deref, 0, 0);
30054
30055 dw_loc_descr_ref ret = NULL, *p = &ret;
30056 while (expr != l)
30057 {
30058 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30059 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30060 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30061 p = &(*p)->dw_loc_next;
30062 expr = expr->dw_loc_next;
30063 }
30064 *p = tail;
30065 return ret;
30066 }
30067
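/* E.g. for a length field whose location is DW_OP_fbreg <-24>, the copy
   returned here is DW_OP_fbreg <-24>; DW_OP_deref, i.e. the consumer
   evaluates the field's address and then loads the value stored there.  */
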
30068 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30069 reference to a variable or argument, adjust it if needed and return:
30070 -1 if the DW_AT_string_length attribute and the DW_AT_{string_length_,}byte_size
30071 attribute, if present, should be removed,
30072 0 if the attribute should be kept, perhaps with minor modifications (no need to rescan),
30073 1 if the attribute has been successfully adjusted. */
30074
30075 static int
30076 optimize_string_length (dw_attr_node *a)
30077 {
30078 dw_loc_descr_ref l = AT_loc (a), lv;
30079 dw_die_ref die;
30080 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30081 {
30082 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30083 die = lookup_decl_die (decl);
30084 if (die)
30085 {
30086 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30087 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30088 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30089 }
30090 else
30091 return -1;
30092 }
30093 else
30094 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30095
30096 /* DWARF5 allows reference class, so we can then reference the DIE.
30097 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30098 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30099 {
30100 a->dw_attr_val.val_class = dw_val_class_die_ref;
30101 a->dw_attr_val.val_entry = NULL;
30102 a->dw_attr_val.v.val_die_ref.die = die;
30103 a->dw_attr_val.v.val_die_ref.external = 0;
30104 return 0;
30105 }
30106
30107 dw_attr_node *av = get_AT (die, DW_AT_location);
30108 dw_loc_list_ref d;
30109 bool non_dwarf_expr = false;
30110
30111 if (av == NULL)
30112 return dwarf_strict ? -1 : 0;
30113 switch (AT_class (av))
30114 {
30115 case dw_val_class_loc_list:
30116 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30117 if (d->expr && non_dwarf_expression (d->expr))
30118 non_dwarf_expr = true;
30119 break;
30120 case dw_val_class_view_list:
30121 gcc_unreachable ();
30122 case dw_val_class_loc:
30123 lv = AT_loc (av);
30124 if (lv == NULL)
30125 return dwarf_strict ? -1 : 0;
30126 if (non_dwarf_expression (lv))
30127 non_dwarf_expr = true;
30128 break;
30129 default:
30130 return dwarf_strict ? -1 : 0;
30131 }
30132
30133 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30134 into DW_OP_call4 or DW_OP_GNU_variable_value into
30135 DW_OP_call4 DW_OP_deref, do so. */
30136 if (!non_dwarf_expr
30137 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30138 {
30139 l->dw_loc_opc = DW_OP_call4;
30140 if (l->dw_loc_next)
30141 l->dw_loc_next = NULL;
30142 else
30143 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30144 return 0;
30145 }
30146
30147 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30148 copy over the DW_AT_location attribute from die to a. */
30149 if (l->dw_loc_next != NULL)
30150 {
30151 a->dw_attr_val = av->dw_attr_val;
30152 return 1;
30153 }
30154
30155 dw_loc_list_ref list, *p;
30156 switch (AT_class (av))
30157 {
30158 case dw_val_class_loc_list:
30159 p = &list;
30160 list = NULL;
30161 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30162 {
30163 lv = copy_deref_exprloc (d->expr);
30164 if (lv)
30165 {
30166 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30167 p = &(*p)->dw_loc_next;
30168 }
30169 else if (!dwarf_strict && d->expr)
30170 return 0;
30171 }
30172 if (list == NULL)
30173 return dwarf_strict ? -1 : 0;
30174 a->dw_attr_val.val_class = dw_val_class_loc_list;
30175 gen_llsym (list);
30176 *AT_loc_list_ptr (a) = list;
30177 return 1;
30178 case dw_val_class_loc:
30179 lv = copy_deref_exprloc (AT_loc (av));
30180 if (lv == NULL)
30181 return dwarf_strict ? -1 : 0;
30182 a->dw_attr_val.v.val_loc = lv;
30183 return 1;
30184 default:
30185 gcc_unreachable ();
30186 }
30187 }
30188
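/* Sketch of the common case: a DW_AT_string_length of

     DW_OP_GNU_variable_value <DIE of the length variable>

   where that variable's DW_AT_location is a plain DWARF expression is
   rewritten as

     DW_OP_call4 <DIE of the length variable>; DW_OP_deref

   which is valid DWARF 4, letting the consumer evaluate the variable's
   location and load the length from it.  */
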
30189 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30190 an address in the .rodata section if the string literal is emitted there;
30191 if it isn't found in .rodata, remove the containing location list or
30192 replace DW_AT_const_value with DW_AT_location and an empty location
30193 expression. Similarly for SYMBOL_REFs, keep only those that refer
30194 to something that has been emitted in the current CU. */
30195
30196 static void
30197 resolve_addr (dw_die_ref die)
30198 {
30199 dw_die_ref c;
30200 dw_attr_node *a;
30201 dw_loc_list_ref *curr, *start, loc;
30202 unsigned ix;
30203 bool remove_AT_byte_size = false;
30204
30205 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30206 switch (AT_class (a))
30207 {
30208 case dw_val_class_loc_list:
30209 start = curr = AT_loc_list_ptr (a);
30210 loc = *curr;
30211 gcc_assert (loc);
30212 /* The same list can be referenced more than once. See if we have
30213 already recorded the result from a previous pass. */
30214 if (loc->replaced)
30215 *curr = loc->dw_loc_next;
30216 else if (!loc->resolved_addr)
30217 {
30218 /* As things stand, we do not expect or allow one die to
30219 reference a suffix of another die's location list chain.
30220 References must be identical or completely separate.
30221 There is therefore no need to cache the result of this
30222 pass on any list other than the first; doing so
30223 would lead to unnecessary writes. */
30224 while (*curr)
30225 {
30226 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30227 if (!resolve_addr_in_expr (a, (*curr)->expr))
30228 {
30229 dw_loc_list_ref next = (*curr)->dw_loc_next;
30230 dw_loc_descr_ref l = (*curr)->expr;
30231
30232 if (next && (*curr)->ll_symbol)
30233 {
30234 gcc_assert (!next->ll_symbol);
30235 next->ll_symbol = (*curr)->ll_symbol;
30236 next->vl_symbol = (*curr)->vl_symbol;
30237 }
30238 if (dwarf_split_debug_info)
30239 remove_loc_list_addr_table_entries (l);
30240 *curr = next;
30241 }
30242 else
30243 {
30244 mark_base_types ((*curr)->expr);
30245 curr = &(*curr)->dw_loc_next;
30246 }
30247 }
30248 if (loc == *start)
30249 loc->resolved_addr = 1;
30250 else
30251 {
30252 loc->replaced = 1;
30253 loc->dw_loc_next = *start;
30254 }
30255 }
30256 if (!*start)
30257 {
30258 remove_AT (die, a->dw_attr);
30259 ix--;
30260 }
30261 break;
30262 case dw_val_class_view_list:
30263 {
30264 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30265 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30266 dw_val_node *llnode
30267 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30268 /* If we no longer have a loclist, or it no longer needs
30269 views, drop this attribute. */
30270 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30271 {
30272 remove_AT (die, a->dw_attr);
30273 ix--;
30274 }
30275 break;
30276 }
30277 case dw_val_class_loc:
30278 {
30279 dw_loc_descr_ref l = AT_loc (a);
30280 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30281 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30282 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30283 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30284 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30285 with DW_FORM_ref referencing the same DIE as
30286 DW_OP_GNU_variable_value used to reference. */
30287 if (a->dw_attr == DW_AT_string_length
30288 && l
30289 && l->dw_loc_opc == DW_OP_GNU_variable_value
30290 && (l->dw_loc_next == NULL
30291 || (l->dw_loc_next->dw_loc_next == NULL
30292 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30293 {
30294 switch (optimize_string_length (a))
30295 {
30296 case -1:
30297 remove_AT (die, a->dw_attr);
30298 ix--;
30299 /* If we drop DW_AT_string_length, we need to drop also
30300 DW_AT_{string_length_,}byte_size. */
30301 remove_AT_byte_size = true;
30302 continue;
30303 default:
30304 break;
30305 case 1:
30306 /* Even if we keep the optimized DW_AT_string_length,
30307 it might have changed AT_class, so process it again. */
30308 ix--;
30309 continue;
30310 }
30311 }
30312 /* For -gdwarf-2 don't attempt to optimize
30313 DW_AT_data_member_location containing
30314 DW_OP_plus_uconst - older consumers might
30315 rely on it being that op instead of a more complex,
30316 but shorter, location description. */
30317 if ((dwarf_version > 2
30318 || a->dw_attr != DW_AT_data_member_location
30319 || l == NULL
30320 || l->dw_loc_opc != DW_OP_plus_uconst
30321 || l->dw_loc_next != NULL)
30322 && !resolve_addr_in_expr (a, l))
30323 {
30324 if (dwarf_split_debug_info)
30325 remove_loc_list_addr_table_entries (l);
30326 if (l != NULL
30327 && l->dw_loc_next == NULL
30328 && l->dw_loc_opc == DW_OP_addr
30329 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30330 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30331 && a->dw_attr == DW_AT_location)
30332 {
30333 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30334 remove_AT (die, a->dw_attr);
30335 ix--;
30336 optimize_location_into_implicit_ptr (die, decl);
30337 break;
30338 }
30339 if (a->dw_attr == DW_AT_string_length)
30340 /* If we drop DW_AT_string_length, we need to drop also
30341 DW_AT_{string_length_,}byte_size. */
30342 remove_AT_byte_size = true;
30343 remove_AT (die, a->dw_attr);
30344 ix--;
30345 }
30346 else
30347 mark_base_types (l);
30348 }
30349 break;
30350 case dw_val_class_addr:
30351 if (a->dw_attr == DW_AT_const_value
30352 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30353 {
30354 if (AT_index (a) != NOT_INDEXED)
30355 remove_addr_table_entry (a->dw_attr_val.val_entry);
30356 remove_AT (die, a->dw_attr);
30357 ix--;
30358 }
30359 if ((die->die_tag == DW_TAG_call_site
30360 && a->dw_attr == DW_AT_call_origin)
30361 || (die->die_tag == DW_TAG_GNU_call_site
30362 && a->dw_attr == DW_AT_abstract_origin))
30363 {
30364 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30365 dw_die_ref tdie = lookup_decl_die (tdecl);
30366 dw_die_ref cdie;
30367 if (tdie == NULL
30368 && DECL_EXTERNAL (tdecl)
30369 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30370 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30371 {
30372 dw_die_ref pdie = cdie;
30373 /* Make sure we don't add these DIEs into type units.
30374 We could emit skeleton DIEs for context (namespaces,
30375 outer structs/classes) and a skeleton DIE for the
30376 innermost context with DW_AT_signature pointing to the
30377 type unit. See PR78835. */
30378 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30379 pdie = pdie->die_parent;
30380 if (pdie == NULL)
30381 {
30382 /* Creating a full DIE for tdecl is overly expensive and,
30383 at this point, even wrong when in the LTO phase,
30384 as it can end up generating new type DIEs we didn't
30385 output, and thus optimize_external_refs will crash. */
30386 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30387 add_AT_flag (tdie, DW_AT_external, 1);
30388 add_AT_flag (tdie, DW_AT_declaration, 1);
30389 add_linkage_attr (tdie, tdecl);
30390 add_name_and_src_coords_attributes (tdie, tdecl, true);
30391 equate_decl_number_to_die (tdecl, tdie);
30392 }
30393 }
30394 if (tdie)
30395 {
30396 a->dw_attr_val.val_class = dw_val_class_die_ref;
30397 a->dw_attr_val.v.val_die_ref.die = tdie;
30398 a->dw_attr_val.v.val_die_ref.external = 0;
30399 }
30400 else
30401 {
30402 if (AT_index (a) != NOT_INDEXED)
30403 remove_addr_table_entry (a->dw_attr_val.val_entry);
30404 remove_AT (die, a->dw_attr);
30405 ix--;
30406 }
30407 }
30408 break;
30409 default:
30410 break;
30411 }
30412
30413 if (remove_AT_byte_size)
30414 remove_AT (die, dwarf_version >= 5
30415 ? DW_AT_string_length_byte_size
30416 : DW_AT_byte_size);
30417
30418 FOR_EACH_CHILD (die, c, resolve_addr (c));
30419 }
30420 \f
30421 /* Helper routines for optimize_location_lists.
30422 This pass tries to share identical location lists in the .debug_loc
30423 section. */
30424
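/* For instance, two formal parameters that happen to have identical
   ranges and identical location expressions end up sharing a single
   .debug_loc list: the second DW_AT_location is simply redirected to the
   first hashed copy in optimize_location_lists_1 below.  */
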
30425 /* Iteratively hash operands of LOC opcode into HSTATE. */
30426
30427 static void
30428 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30429 {
30430 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30431 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30432
30433 switch (loc->dw_loc_opc)
30434 {
30435 case DW_OP_const4u:
30436 case DW_OP_const8u:
30437 if (loc->dtprel)
30438 goto hash_addr;
30439 /* FALLTHRU */
30440 case DW_OP_const1u:
30441 case DW_OP_const1s:
30442 case DW_OP_const2u:
30443 case DW_OP_const2s:
30444 case DW_OP_const4s:
30445 case DW_OP_const8s:
30446 case DW_OP_constu:
30447 case DW_OP_consts:
30448 case DW_OP_pick:
30449 case DW_OP_plus_uconst:
30450 case DW_OP_breg0:
30451 case DW_OP_breg1:
30452 case DW_OP_breg2:
30453 case DW_OP_breg3:
30454 case DW_OP_breg4:
30455 case DW_OP_breg5:
30456 case DW_OP_breg6:
30457 case DW_OP_breg7:
30458 case DW_OP_breg8:
30459 case DW_OP_breg9:
30460 case DW_OP_breg10:
30461 case DW_OP_breg11:
30462 case DW_OP_breg12:
30463 case DW_OP_breg13:
30464 case DW_OP_breg14:
30465 case DW_OP_breg15:
30466 case DW_OP_breg16:
30467 case DW_OP_breg17:
30468 case DW_OP_breg18:
30469 case DW_OP_breg19:
30470 case DW_OP_breg20:
30471 case DW_OP_breg21:
30472 case DW_OP_breg22:
30473 case DW_OP_breg23:
30474 case DW_OP_breg24:
30475 case DW_OP_breg25:
30476 case DW_OP_breg26:
30477 case DW_OP_breg27:
30478 case DW_OP_breg28:
30479 case DW_OP_breg29:
30480 case DW_OP_breg30:
30481 case DW_OP_breg31:
30482 case DW_OP_regx:
30483 case DW_OP_fbreg:
30484 case DW_OP_piece:
30485 case DW_OP_deref_size:
30486 case DW_OP_xderef_size:
30487 hstate.add_object (val1->v.val_int);
30488 break;
30489 case DW_OP_skip:
30490 case DW_OP_bra:
30491 {
30492 int offset;
30493
30494 gcc_assert (val1->val_class == dw_val_class_loc);
30495 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30496 hstate.add_object (offset);
30497 }
30498 break;
30499 case DW_OP_implicit_value:
30500 hstate.add_object (val1->v.val_unsigned);
30501 switch (val2->val_class)
30502 {
30503 case dw_val_class_const:
30504 hstate.add_object (val2->v.val_int);
30505 break;
30506 case dw_val_class_vec:
30507 {
30508 unsigned int elt_size = val2->v.val_vec.elt_size;
30509 unsigned int len = val2->v.val_vec.length;
30510
30511 hstate.add_int (elt_size);
30512 hstate.add_int (len);
30513 hstate.add (val2->v.val_vec.array, len * elt_size);
30514 }
30515 break;
30516 case dw_val_class_const_double:
30517 hstate.add_object (val2->v.val_double.low);
30518 hstate.add_object (val2->v.val_double.high);
30519 break;
30520 case dw_val_class_wide_int:
30521 hstate.add (val2->v.val_wide->get_val (),
30522 get_full_len (*val2->v.val_wide)
30523 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30524 break;
30525 case dw_val_class_addr:
30526 inchash::add_rtx (val2->v.val_addr, hstate);
30527 break;
30528 default:
30529 gcc_unreachable ();
30530 }
30531 break;
30532 case DW_OP_bregx:
30533 case DW_OP_bit_piece:
30534 hstate.add_object (val1->v.val_int);
30535 hstate.add_object (val2->v.val_int);
30536 break;
30537 case DW_OP_addr:
30538 hash_addr:
30539 if (loc->dtprel)
30540 {
30541 unsigned char dtprel = 0xd1;
30542 hstate.add_object (dtprel);
30543 }
30544 inchash::add_rtx (val1->v.val_addr, hstate);
30545 break;
30546 case DW_OP_GNU_addr_index:
30547 case DW_OP_addrx:
30548 case DW_OP_GNU_const_index:
30549 case DW_OP_constx:
30550 {
30551 if (loc->dtprel)
30552 {
30553 unsigned char dtprel = 0xd1;
30554 hstate.add_object (dtprel);
30555 }
30556 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30557 }
30558 break;
30559 case DW_OP_implicit_pointer:
30560 case DW_OP_GNU_implicit_pointer:
30561 hstate.add_int (val2->v.val_int);
30562 break;
30563 case DW_OP_entry_value:
30564 case DW_OP_GNU_entry_value:
30565 hstate.add_object (val1->v.val_loc);
30566 break;
30567 case DW_OP_regval_type:
30568 case DW_OP_deref_type:
30569 case DW_OP_GNU_regval_type:
30570 case DW_OP_GNU_deref_type:
30571 {
30572 unsigned int byte_size
30573 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30574 unsigned int encoding
30575 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30576 hstate.add_object (val1->v.val_int);
30577 hstate.add_object (byte_size);
30578 hstate.add_object (encoding);
30579 }
30580 break;
30581 case DW_OP_convert:
30582 case DW_OP_reinterpret:
30583 case DW_OP_GNU_convert:
30584 case DW_OP_GNU_reinterpret:
30585 if (val1->val_class == dw_val_class_unsigned_const)
30586 {
30587 hstate.add_object (val1->v.val_unsigned);
30588 break;
30589 }
30590 /* FALLTHRU */
30591 case DW_OP_const_type:
30592 case DW_OP_GNU_const_type:
30593 {
30594 unsigned int byte_size
30595 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30596 unsigned int encoding
30597 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30598 hstate.add_object (byte_size);
30599 hstate.add_object (encoding);
30600 if (loc->dw_loc_opc != DW_OP_const_type
30601 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30602 break;
30603 hstate.add_object (val2->val_class);
30604 switch (val2->val_class)
30605 {
30606 case dw_val_class_const:
30607 hstate.add_object (val2->v.val_int);
30608 break;
30609 case dw_val_class_vec:
30610 {
30611 unsigned int elt_size = val2->v.val_vec.elt_size;
30612 unsigned int len = val2->v.val_vec.length;
30613
30614 hstate.add_object (elt_size);
30615 hstate.add_object (len);
30616 hstate.add (val2->v.val_vec.array, len * elt_size);
30617 }
30618 break;
30619 case dw_val_class_const_double:
30620 hstate.add_object (val2->v.val_double.low);
30621 hstate.add_object (val2->v.val_double.high);
30622 break;
30623 case dw_val_class_wide_int:
30624 hstate.add (val2->v.val_wide->get_val (),
30625 get_full_len (*val2->v.val_wide)
30626 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30627 break;
30628 default:
30629 gcc_unreachable ();
30630 }
30631 }
30632 break;
30633
30634 default:
30635 /* Other codes have no operands. */
30636 break;
30637 }
30638 }
30639
30640 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30641
30642 static inline void
30643 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30644 {
30645 dw_loc_descr_ref l;
30646 bool sizes_computed = false;
30647 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30648 size_of_locs (loc);
30649
30650 for (l = loc; l != NULL; l = l->dw_loc_next)
30651 {
30652 enum dwarf_location_atom opc = l->dw_loc_opc;
30653 hstate.add_object (opc);
30654 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30655 {
30656 size_of_locs (loc);
30657 sizes_computed = true;
30658 }
30659 hash_loc_operands (l, hstate);
30660 }
30661 }
30662
30663 /* Compute hash of the whole location list LIST_HEAD. */
30664
30665 static inline void
30666 hash_loc_list (dw_loc_list_ref list_head)
30667 {
30668 dw_loc_list_ref curr = list_head;
30669 inchash::hash hstate;
30670
30671 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30672 {
30673 hstate.add (curr->begin, strlen (curr->begin) + 1);
30674 hstate.add (curr->end, strlen (curr->end) + 1);
30675 hstate.add_object (curr->vbegin);
30676 hstate.add_object (curr->vend);
30677 if (curr->section)
30678 hstate.add (curr->section, strlen (curr->section) + 1);
30679 hash_locs (curr->expr, hstate);
30680 }
30681 list_head->hash = hstate.end ();
30682 }
30683
30684 /* Return true if X and Y opcodes have the same operands. */
30685
30686 static inline bool
30687 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30688 {
30689 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30690 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30691 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30692 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30693
30694 switch (x->dw_loc_opc)
30695 {
30696 case DW_OP_const4u:
30697 case DW_OP_const8u:
30698 if (x->dtprel)
30699 goto hash_addr;
30700 /* FALLTHRU */
30701 case DW_OP_const1u:
30702 case DW_OP_const1s:
30703 case DW_OP_const2u:
30704 case DW_OP_const2s:
30705 case DW_OP_const4s:
30706 case DW_OP_const8s:
30707 case DW_OP_constu:
30708 case DW_OP_consts:
30709 case DW_OP_pick:
30710 case DW_OP_plus_uconst:
30711 case DW_OP_breg0:
30712 case DW_OP_breg1:
30713 case DW_OP_breg2:
30714 case DW_OP_breg3:
30715 case DW_OP_breg4:
30716 case DW_OP_breg5:
30717 case DW_OP_breg6:
30718 case DW_OP_breg7:
30719 case DW_OP_breg8:
30720 case DW_OP_breg9:
30721 case DW_OP_breg10:
30722 case DW_OP_breg11:
30723 case DW_OP_breg12:
30724 case DW_OP_breg13:
30725 case DW_OP_breg14:
30726 case DW_OP_breg15:
30727 case DW_OP_breg16:
30728 case DW_OP_breg17:
30729 case DW_OP_breg18:
30730 case DW_OP_breg19:
30731 case DW_OP_breg20:
30732 case DW_OP_breg21:
30733 case DW_OP_breg22:
30734 case DW_OP_breg23:
30735 case DW_OP_breg24:
30736 case DW_OP_breg25:
30737 case DW_OP_breg26:
30738 case DW_OP_breg27:
30739 case DW_OP_breg28:
30740 case DW_OP_breg29:
30741 case DW_OP_breg30:
30742 case DW_OP_breg31:
30743 case DW_OP_regx:
30744 case DW_OP_fbreg:
30745 case DW_OP_piece:
30746 case DW_OP_deref_size:
30747 case DW_OP_xderef_size:
30748 return valx1->v.val_int == valy1->v.val_int;
30749 case DW_OP_skip:
30750 case DW_OP_bra:
30751 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30752 can cause irrelevant differences in dw_loc_addr. */
30753 gcc_assert (valx1->val_class == dw_val_class_loc
30754 && valy1->val_class == dw_val_class_loc
30755 && (dwarf_split_debug_info
30756 || x->dw_loc_addr == y->dw_loc_addr));
30757 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30758 case DW_OP_implicit_value:
30759 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30760 || valx2->val_class != valy2->val_class)
30761 return false;
30762 switch (valx2->val_class)
30763 {
30764 case dw_val_class_const:
30765 return valx2->v.val_int == valy2->v.val_int;
30766 case dw_val_class_vec:
30767 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30768 && valx2->v.val_vec.length == valy2->v.val_vec.length
30769 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30770 valx2->v.val_vec.elt_size
30771 * valx2->v.val_vec.length) == 0;
30772 case dw_val_class_const_double:
30773 return valx2->v.val_double.low == valy2->v.val_double.low
30774 && valx2->v.val_double.high == valy2->v.val_double.high;
30775 case dw_val_class_wide_int:
30776 return *valx2->v.val_wide == *valy2->v.val_wide;
30777 case dw_val_class_addr:
30778 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30779 default:
30780 gcc_unreachable ();
30781 }
30782 case DW_OP_bregx:
30783 case DW_OP_bit_piece:
30784 return valx1->v.val_int == valy1->v.val_int
30785 && valx2->v.val_int == valy2->v.val_int;
30786 case DW_OP_addr:
30787 hash_addr:
30788 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30789 case DW_OP_GNU_addr_index:
30790 case DW_OP_addrx:
30791 case DW_OP_GNU_const_index:
30792 case DW_OP_constx:
30793 {
30794 rtx ax1 = valx1->val_entry->addr.rtl;
30795 rtx ay1 = valy1->val_entry->addr.rtl;
30796 return rtx_equal_p (ax1, ay1);
30797 }
30798 case DW_OP_implicit_pointer:
30799 case DW_OP_GNU_implicit_pointer:
30800 return valx1->val_class == dw_val_class_die_ref
30801 && valx1->val_class == valy1->val_class
30802 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30803 && valx2->v.val_int == valy2->v.val_int;
30804 case DW_OP_entry_value:
30805 case DW_OP_GNU_entry_value:
30806 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30807 case DW_OP_const_type:
30808 case DW_OP_GNU_const_type:
30809 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30810 || valx2->val_class != valy2->val_class)
30811 return false;
30812 switch (valx2->val_class)
30813 {
30814 case dw_val_class_const:
30815 return valx2->v.val_int == valy2->v.val_int;
30816 case dw_val_class_vec:
30817 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30818 && valx2->v.val_vec.length == valy2->v.val_vec.length
30819 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30820 valx2->v.val_vec.elt_size
30821 * valx2->v.val_vec.length) == 0;
30822 case dw_val_class_const_double:
30823 return valx2->v.val_double.low == valy2->v.val_double.low
30824 && valx2->v.val_double.high == valy2->v.val_double.high;
30825 case dw_val_class_wide_int:
30826 return *valx2->v.val_wide == *valy2->v.val_wide;
30827 default:
30828 gcc_unreachable ();
30829 }
30830 case DW_OP_regval_type:
30831 case DW_OP_deref_type:
30832 case DW_OP_GNU_regval_type:
30833 case DW_OP_GNU_deref_type:
30834 return valx1->v.val_int == valy1->v.val_int
30835 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30836 case DW_OP_convert:
30837 case DW_OP_reinterpret:
30838 case DW_OP_GNU_convert:
30839 case DW_OP_GNU_reinterpret:
30840 if (valx1->val_class != valy1->val_class)
30841 return false;
30842 if (valx1->val_class == dw_val_class_unsigned_const)
30843 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30844 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30845 case DW_OP_GNU_parameter_ref:
30846 return valx1->val_class == dw_val_class_die_ref
30847 && valx1->val_class == valy1->val_class
30848 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30849 default:
30850 /* Other codes have no operands. */
30851 return true;
30852 }
30853 }
30854
30855 /* Return true if DWARF location expressions X and Y are the same. */
30856
30857 static inline bool
30858 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30859 {
30860 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30861 if (x->dw_loc_opc != y->dw_loc_opc
30862 || x->dtprel != y->dtprel
30863 || !compare_loc_operands (x, y))
30864 break;
30865 return x == NULL && y == NULL;
30866 }
30867
30868 /* Hashtable helpers. */
30869
30870 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30871 {
30872 static inline hashval_t hash (const dw_loc_list_struct *);
30873 static inline bool equal (const dw_loc_list_struct *,
30874 const dw_loc_list_struct *);
30875 };
30876
30877 /* Return precomputed hash of location list X. */
30878
30879 inline hashval_t
30880 loc_list_hasher::hash (const dw_loc_list_struct *x)
30881 {
30882 return x->hash;
30883 }
30884
30885 /* Return true if location lists A and B are the same. */
30886
30887 inline bool
30888 loc_list_hasher::equal (const dw_loc_list_struct *a,
30889 const dw_loc_list_struct *b)
30890 {
30891 if (a == b)
30892 return true;
30893 if (a->hash != b->hash)
30894 return false;
30895 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30896 if (strcmp (a->begin, b->begin) != 0
30897 || strcmp (a->end, b->end) != 0
30898 || (a->section == NULL) != (b->section == NULL)
30899 || (a->section && strcmp (a->section, b->section) != 0)
30900 || a->vbegin != b->vbegin || a->vend != b->vend
30901 || !compare_locs (a->expr, b->expr))
30902 break;
30903 return a == NULL && b == NULL;
30904 }
30905
30906 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30907
30908
30909 /* Recursively optimize location lists referenced from DIE
30910 children and share them whenever possible. */
30911
30912 static void
30913 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30914 {
30915 dw_die_ref c;
30916 dw_attr_node *a;
30917 unsigned ix;
30918 dw_loc_list_struct **slot;
30919 bool drop_locviews = false;
30920 bool has_locviews = false;
30921
30922 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30923 if (AT_class (a) == dw_val_class_loc_list)
30924 {
30925 dw_loc_list_ref list = AT_loc_list (a);
30926 /* TODO: perform some optimizations here, before hashing
30927 it and storing into the hash table. */
30928 hash_loc_list (list);
30929 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
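/* Either make LIST the canonical list for this hash slot, or redirect
   the attribute to the identical list already stored there.  If the
   surviving list carries no location views even though a
   DW_AT_GNU_locviews attribute was generated for it, note that the
   attribute has to be dropped below.  */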
30930 if (*slot == NULL)
30931 {
30932 *slot = list;
30933 if (loc_list_has_views (list))
30934 gcc_assert (list->vl_symbol);
30935 else if (list->vl_symbol)
30936 {
30937 drop_locviews = true;
30938 list->vl_symbol = NULL;
30939 }
30940 }
30941 else
30942 {
30943 if (list->vl_symbol && !(*slot)->vl_symbol)
30944 drop_locviews = true;
30945 a->dw_attr_val.v.val_loc_list = *slot;
30946 }
30947 }
30948 else if (AT_class (a) == dw_val_class_view_list)
30949 {
30950 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30951 has_locviews = true;
30952 }
30953
30954
30955 if (drop_locviews && has_locviews)
30956 remove_AT (die, DW_AT_GNU_locviews);
30957
30958 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30959 }
30960
30961
30962 /* Recursively assign each location list a unique index into the debug_addr
30963 section. */
30964
30965 static void
30966 index_location_lists (dw_die_ref die)
30967 {
30968 dw_die_ref c;
30969 dw_attr_node *a;
30970 unsigned ix;
30971
30972 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30973 if (AT_class (a) == dw_val_class_loc_list)
30974 {
30975 dw_loc_list_ref list = AT_loc_list (a);
30976 dw_loc_list_ref curr;
30977 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
30978 {
30979 /* Don't index an entry that has already been indexed
30980 or won't be output. Make sure skip_loc_list_entry doesn't
30981 call size_of_locs, because that might cause a circular dependency:
30982 index_location_lists requires address table indexes to be
30983 computed, but it adds new indexes through add_addr_table_entry,
30984 while address table index computation requires no new additions
30985 to the hash table. In the rare case of a DWARF[234] location
30986 expression >= 64KB, we'll just waste an unused address table
30987 entry for it. */
30988 if (curr->begin_entry != NULL
30989 || skip_loc_list_entry (curr))
30990 continue;
30991
30992 curr->begin_entry
30993 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
30994 }
30995 }
30996
30997 FOR_EACH_CHILD (die, c, index_location_lists (c));
30998 }
30999
31000 /* Optimize location lists referenced from DIE
31001 children and share them whenever possible. */
31002
31003 static void
31004 optimize_location_lists (dw_die_ref die)
31005 {
31006 loc_list_hash_type htab (500);
31007 optimize_location_lists_1 (die, &htab);
31008 }
31009 \f
31010 /* Traverse the limbo die list, and add parent/child links. The only
31011 dies without parents that should be here are concrete instances of
31012 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31013 For concrete instances, we can get the parent die from the abstract
31014 instance. */
31015
31016 static void
31017 flush_limbo_die_list (void)
31018 {
31019 limbo_die_node *node;
31020
31021 /* get_context_die calls force_decl_die, which can put new DIEs on the
31022 limbo list in LTO mode when nested functions are put in a different
31023 partition than that of their parent function. */
31024 while ((node = limbo_die_list))
31025 {
31026 dw_die_ref die = node->die;
31027 limbo_die_list = node->next;
31028
31029 if (die->die_parent == NULL)
31030 {
31031 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31032
31033 if (origin && origin->die_parent)
31034 add_child_die (origin->die_parent, die);
31035 else if (is_cu_die (die))
31036 ;
31037 else if (seen_error ())
31038 /* It's OK to be confused by errors in the input. */
31039 add_child_die (comp_unit_die (), die);
31040 else
31041 {
31042 /* In certain situations, the lexical block containing a
31043 nested function can be optimized away, which results
31044 in the nested function die being orphaned. Likewise
31045 with the return type of that nested function. Force
31046 this to be a child of the containing function.
31047
31048 It may happen that even the containing function got fully
31049 inlined and optimized out. In that case we are lost and
31050 assign the empty child. This should not be a big issue, as
31051 the function is likely unreachable too. */
31052 gcc_assert (node->created_for);
31053
31054 if (DECL_P (node->created_for))
31055 origin = get_context_die (DECL_CONTEXT (node->created_for));
31056 else if (TYPE_P (node->created_for))
31057 origin = scope_die_for (node->created_for, comp_unit_die ());
31058 else
31059 origin = comp_unit_die ();
31060
31061 add_child_die (origin, die);
31062 }
31063 }
31064 }
31065 }
31066
31067 /* Reset DIEs so we can output them again. */
31068
31069 static void
31070 reset_dies (dw_die_ref die)
31071 {
31072 dw_die_ref c;
31073
31074 /* Remove stuff we re-generate. */
31075 die->die_mark = 0;
31076 die->die_offset = 0;
31077 die->die_abbrev = 0;
31078 remove_AT (die, DW_AT_sibling);
31079
31080 FOR_EACH_CHILD (die, c, reset_dies (c));
31081 }
31082
31083 /* Output stuff that dwarf requires at the end of every file,
31084 and generate the DWARF-2 debugging info. */
31085
31086 static void
31087 dwarf2out_finish (const char *)
31088 {
31089 comdat_type_node *ctnode;
31090 dw_die_ref main_comp_unit_die;
31091 unsigned char checksum[16];
31092 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31093
31094 /* Flush out any latecomers to the limbo party. */
31095 flush_limbo_die_list ();
31096
31097 if (inline_entry_data_table)
31098 gcc_assert (inline_entry_data_table->elements () == 0);
31099
31100 if (flag_checking)
31101 {
31102 verify_die (comp_unit_die ());
31103 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31104 verify_die (node->die);
31105 }
31106
31107 /* We shouldn't have any symbols with delayed asm names for
31108 DIEs generated after early finish. */
31109 gcc_assert (deferred_asm_name == NULL);
31110
31111 gen_remaining_tmpl_value_param_die_attribute ();
31112
31113 if (flag_generate_lto || flag_generate_offload)
31114 {
31115 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31116
31117 /* Prune stuff so that dwarf2out_finish runs successfully
31118 for the fat part of the object. */
31119 reset_dies (comp_unit_die ());
31120 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31121 reset_dies (node->die);
31122
31123 hash_table<comdat_type_hasher> comdat_type_table (100);
31124 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31125 {
31126 comdat_type_node **slot
31127 = comdat_type_table.find_slot (ctnode, INSERT);
31128
31129 /* Don't reset types twice. */
31130 if (*slot != HTAB_EMPTY_ENTRY)
31131 continue;
31132
31133 /* Remove the pointer to the line table. */
31134 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31135
31136 if (debug_info_level >= DINFO_LEVEL_TERSE)
31137 reset_dies (ctnode->root_die);
31138
31139 *slot = ctnode;
31140 }
31141
31142 /* Reset the CU DIE's symbol so we don't output it twice. */
31143 comp_unit_die ()->die_id.die_symbol = NULL;
31144
31145 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31146 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31147 if (have_macinfo)
31148 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31149
31150 /* Remove indirect string decisions. */
31151 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31152 }
31153
31154 #if ENABLE_ASSERT_CHECKING
31155 {
31156 dw_die_ref die = comp_unit_die (), c;
31157 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31158 }
31159 #endif
31160 resolve_addr (comp_unit_die ());
31161 move_marked_base_types ();
31162
31163 /* Initialize sections and labels used for actual assembler output. */
31164 unsigned generation = init_sections_and_labels (false);
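/* The generation number returned above keeps these labels distinct from
   those created for the early (LTO) debug output; it is passed to
   output_rnglists below.  */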
31165
31166 /* Traverse the DIEs and add sibling attributes to those DIEs that
31167 have children. */
31168 add_sibling_attributes (comp_unit_die ());
31169 limbo_die_node *node;
31170 for (node = cu_die_list; node; node = node->next)
31171 add_sibling_attributes (node->die);
31172 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31173 add_sibling_attributes (ctnode->root_die);
31174
31175 /* When splitting DWARF info, we put some attributes in the
31176 skeleton compile_unit DIE that remains in the .o, while
31177 most attributes go in the DWO compile_unit_die. */
31178 if (dwarf_split_debug_info)
31179 {
31180 limbo_die_node *cu;
31181 main_comp_unit_die = gen_compile_unit_die (NULL);
31182 if (dwarf_version >= 5)
31183 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31184 cu = limbo_die_list;
31185 gcc_assert (cu->die == main_comp_unit_die);
31186 limbo_die_list = limbo_die_list->next;
31187 cu->next = cu_die_list;
31188 cu_die_list = cu;
31189 }
31190 else
31191 main_comp_unit_die = comp_unit_die ();
31192
31193 /* Output a terminator label for the .text section. */
31194 switch_to_section (text_section);
31195 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31196 if (cold_text_section)
31197 {
31198 switch_to_section (cold_text_section);
31199 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31200 }
31201
31202 /* We can only use the low/high_pc attributes if all of the code was
31203 in .text; with strict DWARF 2, DW_AT_ranges is unavailable anyway. */
31204 if (!have_multiple_function_sections
31205 || (dwarf_version < 3 && dwarf_strict))
31206 {
31207 /* Don't add if the CU has no associated code. */
31208 if (text_section_used)
31209 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31210 text_end_label, true);
31211 }
31212 else
31213 {
31214 unsigned fde_idx;
31215 dw_fde_ref fde;
31216 bool range_list_added = false;
31217
31218 if (text_section_used)
31219 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31220 text_end_label, &range_list_added, true);
31221 if (cold_text_section_used)
31222 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31223 cold_end_label, &range_list_added, true);
31224
31225 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31226 {
31227 if (DECL_IGNORED_P (fde->decl))
31228 continue;
31229 if (!fde->in_std_section)
31230 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31231 fde->dw_fde_end, &range_list_added,
31232 true);
31233 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31234 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31235 fde->dw_fde_second_end, &range_list_added,
31236 true);
31237 }
31238
31239 if (range_list_added)
31240 {
31241 /* We need to give .debug_loc and .debug_ranges an appropriate
31242 "base address". Use zero so that these addresses become
31243 absolute. Historically, we've emitted the unexpected
31244 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31245 Emit both to give time for other tools to adapt. */
31246 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31247 if (! dwarf_strict && dwarf_version < 4)
31248 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31249
31250 add_ranges (NULL);
31251 }
31252 }
31253
31254 /* AIX Assembler inserts the length, so adjust the reference to match the
31255 offset expected by debuggers. */
31256 strcpy (dl_section_ref, debug_line_section_label);
31257 if (XCOFF_DEBUGGING_INFO)
31258 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31259
31260 if (debug_info_level >= DINFO_LEVEL_TERSE)
31261 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31262 dl_section_ref);
31263
31264 if (have_macinfo)
31265 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31266 macinfo_section_label);
31267
31268 if (dwarf_split_debug_info)
31269 {
31270 if (have_location_lists)
31271 {
31272 /* Since we generate the loclists in the split DWARF .dwo
31273 file itself, we don't need to generate a loclists_base
31274 attribute for the split compile unit DIE. That attribute
31275 (and using relocatable sec_offset FORMs) isn't allowed
31276 for a split compile unit. Only if the .debug_loclists
31277 section was in the main file, would we need to generate a
31278 loclists_base attribute here (for the full or skeleton
31279 unit DIE). */
31280
31281 /* optimize_location_lists calculates the size of the lists,
31282 so index them first, and assign indices to the entries.
31283 Although optimize_location_lists will remove entries from
31284 the table, it only does so for duplicates, and therefore
31285 only reduces ref_counts to 1. */
31286 index_location_lists (comp_unit_die ());
31287 }
31288
31289 if (addr_index_table != NULL)
31290 {
31291 unsigned int index = 0;
31292 addr_index_table
31293 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31294 (&index);
31295 }
31296 }
31297
31298 loc_list_idx = 0;
31299 if (have_location_lists)
31300 {
31301 optimize_location_lists (comp_unit_die ());
31302 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31303 if (dwarf_version >= 5 && dwarf_split_debug_info)
31304 assign_location_list_indexes (comp_unit_die ());
31305 }
31306
31307 save_macinfo_strings ();
31308
31309 if (dwarf_split_debug_info)
31310 {
31311 unsigned int index = 0;
31312
31313 /* Add attributes common to skeleton compile_units and
31314 type_units. Because these attributes include strings, it
31315 must be done before freezing the string table. Top-level
31316 skeleton die attrs are added when the skeleton type unit is
31317 created, so ensure it is created by this point. */
31318 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31319 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31320 }
31321
31322 /* Output all of the compilation units. We put the main one last so that
31323 the offsets are available to output_pubnames. */
31324 for (node = cu_die_list; node; node = node->next)
31325 output_comp_unit (node->die, 0, NULL);
31326
31327 hash_table<comdat_type_hasher> comdat_type_table (100);
31328 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31329 {
31330 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31331
31332 /* Don't output duplicate types. */
31333 if (*slot != HTAB_EMPTY_ENTRY)
31334 continue;
31335
31336 /* Add a pointer to the line table for the main compilation unit
31337 so that the debugger can make sense of DW_AT_decl_file
31338 attributes. */
31339 if (debug_info_level >= DINFO_LEVEL_TERSE)
31340 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31341 (!dwarf_split_debug_info
31342 ? dl_section_ref
31343 : debug_skeleton_line_section_label));
31344
31345 output_comdat_type_unit (ctnode);
31346 *slot = ctnode;
31347 }
31348
31349 if (dwarf_split_debug_info)
31350 {
31351 int mark;
31352 struct md5_ctx ctx;
31353
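      /* With -gsplit-dwarf and DWARF 5, range lists are referenced by
	 index, so assign indexes to the range list table entries first.  */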
31354 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31355 index_rnglists ();
31356
31357 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31358 md5_init_ctx (&ctx);
31359 mark = 0;
31360 die_checksum (comp_unit_die (), &ctx, &mark);
31361 unmark_all_dies (comp_unit_die ());
31362 md5_finish_ctx (&ctx, checksum);
31363
31364 if (dwarf_version < 5)
31365 {
31366 /* Use the first 8 bytes of the checksum as the dwo_id,
31367 and add it to both comp-unit DIEs. */
31368 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31369 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31370 }
31371
31372 /* Add the base offset of the ranges table to the skeleton
31373 comp-unit DIE. */
31374 if (!vec_safe_is_empty (ranges_table))
31375 {
31376 if (dwarf_version >= 5)
31377 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31378 ranges_base_label);
31379 else
31380 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31381 ranges_section_label);
31382 }
31383
31384 switch_to_section (debug_addr_section);
31385 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31386 which GCC uses to implement -gsplit-dwarf as a DWARF GNU extension
31387 before DWARF5, didn't have a header for .debug_addr units.
31388 DWARF5 specifies a small header when address tables are used. */
31389 if (dwarf_version >= 5)
31390 {
31391 unsigned int last_idx = 0;
31392 unsigned long addrs_length;
31393
31394 addr_index_table->traverse_noresize
31395 <unsigned int *, count_index_addrs> (&last_idx);
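	  /* The unit length excludes the length field itself; the rest of
	     the header is 2 bytes of version, 1 byte of address size and
	     1 byte of segment selector size, hence the extra 4 bytes on
	     top of the address entries.  */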
31396 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31397
31398 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31399 dw2_asm_output_data (4, 0xffffffff,
31400 "Escape value for 64-bit DWARF extension");
31401 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31402 "Length of Address Unit");
31403 dw2_asm_output_data (2, 5, "DWARF addr version");
31404 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31405 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31406 }
31407 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31408 output_addr_table ();
31409 }
31410
31411 /* Output the main compilation unit if non-empty or if .debug_macinfo
31412 or .debug_macro will be emitted. */
31413 output_comp_unit (comp_unit_die (), have_macinfo,
31414 dwarf_split_debug_info ? checksum : NULL);
31415
31416 if (dwarf_split_debug_info && info_section_emitted)
31417 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31418
31419 /* Output the abbreviation table. */
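  /* Slot zero of abbrev_die_table is allocated but unused because
     abbreviation codes start at 1, so a length of 1 means no
     abbreviations were assigned.  */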
31420 if (vec_safe_length (abbrev_die_table) != 1)
31421 {
31422 switch_to_section (debug_abbrev_section);
31423 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31424 output_abbrev_section ();
31425 }
31426
31427 /* Output location list section if necessary. */
31428 if (have_location_lists)
31429 {
31430 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31431 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31432 /* Output the location lists info. */
31433 switch_to_section (debug_loc_section);
31434 if (dwarf_version >= 5)
31435 {
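	  /* For DWARF 5 the location list section starts with a table
	     header: unit length, version, address size, segment selector
	     size and offset entry count.  */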
31436 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
31437 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
31438 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31439 dw2_asm_output_data (4, 0xffffffff,
31440 "Initial length escape value indicating "
31441 "64-bit DWARF extension");
31442 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31443 "Length of Location Lists");
31444 ASM_OUTPUT_LABEL (asm_out_file, l1);
31445 output_dwarf_version ();
31446 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31447 dw2_asm_output_data (1, 0, "Segment Size");
31448 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31449 "Offset Entry Count");
31450 }
31451 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31452 if (dwarf_version >= 5 && dwarf_split_debug_info)
31453 {
31454 unsigned int save_loc_list_idx = loc_list_idx;
31455 loc_list_idx = 0;
31456 output_loclists_offsets (comp_unit_die ());
31457 gcc_assert (save_loc_list_idx == loc_list_idx);
31458 }
31459 output_location_lists (comp_unit_die ());
31460 if (dwarf_version >= 5)
31461 ASM_OUTPUT_LABEL (asm_out_file, l2);
31462 }
31463
31464 output_pubtables ();
31465
31466 /* Output the address range information if a CU (.debug_info section)
31467 was emitted. We output an empty table even if we had no functions
31468 to put in it. This is because the consumer has no way to tell the
31469 difference between an empty table that we omitted and failure to
31470 generate a table that would have contained data. */
31471 if (info_section_emitted)
31472 {
31473 switch_to_section (debug_aranges_section);
31474 output_aranges ();
31475 }
31476
31477 /* Output ranges section if necessary. */
31478 if (!vec_safe_is_empty (ranges_table))
31479 {
31480 if (dwarf_version >= 5)
31481 output_rnglists (generation);
31482 else
31483 output_ranges ();
31484 }
31485
31486 /* Have to end the macro section. */
31487 if (have_macinfo)
31488 {
31489 switch_to_section (debug_macinfo_section);
31490 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31491 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31492 : debug_skeleton_line_section_label, false);
31493 dw2_asm_output_data (1, 0, "End compilation unit");
31494 }
31495
31496 /* Output the source line correspondence table. We must do this
31497 even if there is no line information. Otherwise, on an empty
31498 translation unit, we will generate a present, but empty,
31499 .debug_info section. IRIX 6.5 `nm' will then complain when
31500 examining the file. This is done late so that any filenames
31501 used by the debug_info section are marked as 'used'. */
31502 switch_to_section (debug_line_section);
31503 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31504 if (! output_asm_line_debug_info ())
31505 output_line_info (false);
31506
31507 if (dwarf_split_debug_info && info_section_emitted)
31508 {
31509 switch_to_section (debug_skeleton_line_section);
31510 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31511 output_line_info (true);
31512 }
31513
31514 /* If we emitted any indirect strings, output the string table too. */
31515 if (debug_str_hash || skeleton_debug_str_hash)
31516 output_indirect_strings ();
31517 if (debug_line_str_hash)
31518 {
31519 switch_to_section (debug_line_str_section);
31520 const enum dwarf_form form = DW_FORM_line_strp;
31521 debug_line_str_hash->traverse<enum dwarf_form,
31522 output_indirect_string> (form);
31523 }
31524
31525 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31526 symview_upper_bound = 0;
31527 if (zero_view_p)
31528 bitmap_clear (zero_view_p);
31529 }
31530
31531 /* Returns a hash value for X (which really is a variable_value_struct). */
31532
31533 inline hashval_t
31534 variable_value_hasher::hash (variable_value_struct *x)
31535 {
31536 return (hashval_t) x->decl_id;
31537 }
31538
31539 /* Return nonzero if decl_id of variable_value_struct X is the same as
31540 UID of decl Y. */
31541
31542 inline bool
31543 variable_value_hasher::equal (variable_value_struct *x, tree y)
31544 {
31545 return x->decl_id == DECL_UID (y);
31546 }
31547
31548 /* Helper function for resolve_variable_value, handle
31549 DW_OP_GNU_variable_value in one location expression.
31550 Return true if exprloc has been changed into loclist. */
31551
31552 static bool
31553 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31554 {
31555 dw_loc_descr_ref next;
31556 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31557 {
31558 next = loc->dw_loc_next;
31559 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31560 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31561 continue;
31562
31563 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31564 if (DECL_CONTEXT (decl) != current_function_decl)
31565 continue;
31566
31567 dw_die_ref ref = lookup_decl_die (decl);
31568 if (ref)
31569 {
31570 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31571 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31572 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31573 continue;
31574 }
31575 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31576 if (l == NULL)
31577 continue;
31578 if (l->dw_loc_next)
31579 {
31580 if (AT_class (a) != dw_val_class_loc)
31581 continue;
31582 switch (a->dw_attr)
31583 {
31584 /* Following attributes allow both exprloc and loclist
31585 classes, so we can change them into a loclist. */
31586 case DW_AT_location:
31587 case DW_AT_string_length:
31588 case DW_AT_return_addr:
31589 case DW_AT_data_member_location:
31590 case DW_AT_frame_base:
31591 case DW_AT_segment:
31592 case DW_AT_static_link:
31593 case DW_AT_use_location:
31594 case DW_AT_vtable_elem_location:
31595 if (prev)
31596 {
31597 prev->dw_loc_next = NULL;
31598 prepend_loc_descr_to_each (l, AT_loc (a));
31599 }
31600 if (next)
31601 add_loc_descr_to_each (l, next);
31602 a->dw_attr_val.val_class = dw_val_class_loc_list;
31603 a->dw_attr_val.val_entry = NULL;
31604 a->dw_attr_val.v.val_loc_list = l;
31605 have_location_lists = true;
31606 return true;
31607 /* Following attributes allow both exprloc and reference,
31608 so if the whole expression is DW_OP_GNU_variable_value alone
31609 we could transform it into reference. */
31610 case DW_AT_byte_size:
31611 case DW_AT_bit_size:
31612 case DW_AT_lower_bound:
31613 case DW_AT_upper_bound:
31614 case DW_AT_bit_stride:
31615 case DW_AT_count:
31616 case DW_AT_allocated:
31617 case DW_AT_associated:
31618 case DW_AT_byte_stride:
31619 if (prev == NULL && next == NULL)
31620 break;
31621 /* FALLTHRU */
31622 default:
31623 if (dwarf_strict)
31624 continue;
31625 break;
31626 }
31627 /* Create DW_TAG_variable that we can refer to. */
31628 gen_decl_die (decl, NULL_TREE, NULL,
31629 lookup_decl_die (current_function_decl));
31630 ref = lookup_decl_die (decl);
31631 if (ref)
31632 {
31633 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31634 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31635 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31636 }
31637 continue;
31638 }
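      /* L is a location list with a single entry; splice that entry's
	 expression in place of the DW_OP_GNU_variable_value operation.  */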
31639 if (prev)
31640 {
31641 prev->dw_loc_next = l->expr;
31642 add_loc_descr (&prev->dw_loc_next, next);
31643 free_loc_descr (loc, NULL);
31644 next = prev->dw_loc_next;
31645 }
31646 else
31647 {
31648 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31649 add_loc_descr (&loc, next);
31650 next = loc;
31651 }
31652 loc = prev;
31653 }
31654 return false;
31655 }
31656
31657 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31658
31659 static void
31660 resolve_variable_value (dw_die_ref die)
31661 {
31662 dw_attr_node *a;
31663 dw_loc_list_ref loc;
31664 unsigned ix;
31665
31666 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31667 switch (AT_class (a))
31668 {
31669 case dw_val_class_loc:
31670 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31671 break;
31672 /* FALLTHRU */
31673 case dw_val_class_loc_list:
31674 loc = AT_loc_list (a);
31675 gcc_assert (loc);
31676 for (; loc; loc = loc->dw_loc_next)
31677 resolve_variable_value_in_expr (a, loc->expr);
31678 break;
31679 default:
31680 break;
31681 }
31682 }
31683
31684 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31685 temporaries in the current function. */
31686
31687 static void
31688 resolve_variable_values (void)
31689 {
31690 if (!variable_value_hash || !current_function_decl)
31691 return;
31692
31693 struct variable_value_struct *node
31694 = variable_value_hash->find_with_hash (current_function_decl,
31695 DECL_UID (current_function_decl));
31696
31697 if (node == NULL)
31698 return;
31699
31700 unsigned int i;
31701 dw_die_ref die;
31702 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31703 resolve_variable_value (die);
31704 }
31705
31706 /* Helper function for note_variable_value, handle one location
31707 expression. */
31708
31709 static void
31710 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31711 {
31712 for (; loc; loc = loc->dw_loc_next)
31713 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31714 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31715 {
31716 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31717 dw_die_ref ref = lookup_decl_die (decl);
31718 if (! ref && (flag_generate_lto || flag_generate_offload))
31719 {
31720 /* ??? This is somewhat a hack because we do not create DIEs
31721 for variables not in BLOCK trees early, but when generating
31722 early LTO output we need the dw_val_class_decl_ref to be
31723 fully resolved. For fat LTO objects we'd also like to
31724 undo this after LTO dwarf output. */
31725 gcc_assert (DECL_CONTEXT (decl));
31726 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31727 gcc_assert (ctx != NULL);
31728 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31729 ref = lookup_decl_die (decl);
31730 gcc_assert (ref != NULL);
31731 }
31732 if (ref)
31733 {
31734 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31735 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31736 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31737 continue;
31738 }
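      /* No DIE for DECL is available yet.  If DECL is a variable of
	 another function that already has a DIE, queue this DIE so that
	 resolve_variable_values can retry the resolution when that
	 function is being processed.  */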
31739 if (VAR_P (decl)
31740 && DECL_CONTEXT (decl)
31741 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31742 && lookup_decl_die (DECL_CONTEXT (decl)))
31743 {
31744 if (!variable_value_hash)
31745 variable_value_hash
31746 = hash_table<variable_value_hasher>::create_ggc (10);
31747
31748 tree fndecl = DECL_CONTEXT (decl);
31749 struct variable_value_struct *node;
31750 struct variable_value_struct **slot
31751 = variable_value_hash->find_slot_with_hash (fndecl,
31752 DECL_UID (fndecl),
31753 INSERT);
31754 if (*slot == NULL)
31755 {
31756 node = ggc_cleared_alloc<variable_value_struct> ();
31757 node->decl_id = DECL_UID (fndecl);
31758 *slot = node;
31759 }
31760 else
31761 node = *slot;
31762
31763 vec_safe_push (node->dies, die);
31764 }
31765 }
31766 }
31767
31768 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31769 with dw_val_class_decl_ref operand. */
31770
31771 static void
31772 note_variable_value (dw_die_ref die)
31773 {
31774 dw_die_ref c;
31775 dw_attr_node *a;
31776 dw_loc_list_ref loc;
31777 unsigned ix;
31778
31779 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31780 switch (AT_class (a))
31781 {
31782 case dw_val_class_loc_list:
31783 loc = AT_loc_list (a);
31784 gcc_assert (loc);
31785 if (!loc->noted_variable_value)
31786 {
31787 loc->noted_variable_value = 1;
31788 for (; loc; loc = loc->dw_loc_next)
31789 note_variable_value_in_expr (die, loc->expr);
31790 }
31791 break;
31792 case dw_val_class_loc:
31793 note_variable_value_in_expr (die, AT_loc (a));
31794 break;
31795 default:
31796 break;
31797 }
31798
31799 /* Mark children. */
31800 FOR_EACH_CHILD (die, c, note_variable_value (c));
31801 }
31802
31803 /* Perform any cleanups needed after the early debug generation pass
31804 has run. */
31805
31806 static void
31807 dwarf2out_early_finish (const char *filename)
31808 {
31809 set_early_dwarf s;
31810 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31811
31812 /* PCH might result in DW_AT_producer string being restored from the
31813 header compilation, so always fill it with an empty string initially
31814 and overwrite only here. */
31815 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31816 producer_string = gen_producer_string ();
31817 producer->dw_attr_val.v.val_str->refcount--;
31818 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31819
31820 /* Add the name for the main input file now. We delayed this from
31821 dwarf2out_init to avoid complications with PCH. */
31822 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31823 add_comp_dir_attribute (comp_unit_die ());
31824
31825 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31826 DW_AT_comp_dir into .debug_line_str section. */
31827 if (!output_asm_line_debug_info ()
31828 && dwarf_version >= 5
31829 && DWARF5_USE_DEBUG_LINE_STR)
31830 {
31831 for (int i = 0; i < 2; i++)
31832 {
31833 dw_attr_node *a = get_AT (comp_unit_die (),
31834 i ? DW_AT_comp_dir : DW_AT_name);
31835 if (a == NULL
31836 || AT_class (a) != dw_val_class_str
31837 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31838 continue;
31839
31840 if (! debug_line_str_hash)
31841 debug_line_str_hash
31842 = hash_table<indirect_string_hasher>::create_ggc (10);
31843
31844 struct indirect_string_node *node
31845 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31846 set_indirect_string (node);
31847 node->form = DW_FORM_line_strp;
31848 a->dw_attr_val.v.val_str->refcount--;
31849 a->dw_attr_val.v.val_str = node;
31850 }
31851 }
31852
31853 /* With LTO, early dwarf was really finished at compile time, so make
31854 sure to adjust the phase after annotating the LTRANS CU DIE. */
31855 if (in_lto_p)
31856 {
31857 early_dwarf_finished = true;
31858 return;
31859 }
31860
31861 /* Walk through the list of incomplete types again, trying once more to
31862 emit full debugging info for them. */
31863 retry_incomplete_types ();
31864
31865 /* The point here is to flush out the limbo list so that it is empty
31866 and we don't need to stream it for LTO. */
31867 flush_limbo_die_list ();
31868
31869 gen_scheduled_generic_parms_dies ();
31870 gen_remaining_tmpl_value_param_die_attribute ();
31871
31872 /* Add DW_AT_linkage_name for all deferred DIEs. */
31873 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31874 {
31875 tree decl = node->created_for;
31876 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31877 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31878 ended up in deferred_asm_name before we knew it was
31879 constant and never written to disk. */
31880 && DECL_ASSEMBLER_NAME (decl))
31881 {
31882 add_linkage_attr (node->die, decl);
31883 move_linkage_attr (node->die);
31884 }
31885 }
31886 deferred_asm_name = NULL;
31887
31888 if (flag_eliminate_unused_debug_types)
31889 prune_unused_types ();
31890
31891 /* Generate separate COMDAT sections for type DIEs. */
31892 if (use_debug_types)
31893 {
31894 break_out_comdat_types (comp_unit_die ());
31895
31896 /* Each new type_unit DIE was added to the limbo die list when created.
31897 Since these have all been added to comdat_type_list, clear the
31898 limbo die list. */
31899 limbo_die_list = NULL;
31900
31901 /* For each new comdat type unit, copy declarations for incomplete
31902 types to make the new unit self-contained (i.e., no direct
31903 references to the main compile unit). */
31904 for (comdat_type_node *ctnode = comdat_type_list;
31905 ctnode != NULL; ctnode = ctnode->next)
31906 copy_decls_for_unworthy_types (ctnode->root_die);
31907 copy_decls_for_unworthy_types (comp_unit_die ());
31908
31909 /* In the process of copying declarations from one unit to another,
31910 we may have left some declarations behind that are no longer
31911 referenced. Prune them. */
31912 prune_unused_types ();
31913 }
31914
31915 /* Traverse the DIEs and note DIEs with DW_OP_GNU_variable_value still
31916 with dw_val_class_decl_ref operand. */
31917 note_variable_value (comp_unit_die ());
31918 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31919 note_variable_value (node->die);
31920 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31921 ctnode = ctnode->next)
31922 note_variable_value (ctnode->root_die);
31923 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31924 note_variable_value (node->die);
31925
31926 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31927 both the main_cu and all skeleton TUs. Making this call unconditional
31928 would end up either adding a second copy of the AT_pubnames attribute, or
31929 requiring a special case in add_top_level_skeleton_die_attrs. */
31930 if (!dwarf_split_debug_info)
31931 add_AT_pubnames (comp_unit_die ());
31932
31933 /* The early debug phase is now finished. */
31934 early_dwarf_finished = true;
31935
31936 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
31937 if ((!flag_generate_lto && !flag_generate_offload)
31938 /* FIXME: Disable debug info generation for PE-COFF targets since the
31939 copy_lto_debug_sections operation of the simple object support in
31940 libiberty is not implemented for them yet. */
31941 || TARGET_PECOFF)
31942 return;
31943
31944 /* Now, as we are going to output for LTO, initialize sections and labels
31945 to the LTO variants. We don't need a random-seed postfix as for other
31946 LTO sections, since linking the LTO debug sections into one in a
31947 partial link is fine. */
31948 init_sections_and_labels (true);
31949
31950 /* The output below is modeled after dwarf2out_finish with all
31951 location-related output removed and some LTO-specific changes.
31952 Some refactoring might make both smaller and easier to match up. */
31953
31954 /* Traverse the DIEs and add sibling attributes to those DIEs
31955 that have children. */
31956 add_sibling_attributes (comp_unit_die ());
31957 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31958 add_sibling_attributes (node->die);
31959 for (comdat_type_node *ctnode = comdat_type_list;
31960 ctnode != NULL; ctnode = ctnode->next)
31961 add_sibling_attributes (ctnode->root_die);
31962
31963 /* AIX Assembler inserts the length, so adjust the reference to match the
31964 offset expected by debuggers. */
31965 strcpy (dl_section_ref, debug_line_section_label);
31966 if (XCOFF_DEBUGGING_INFO)
31967 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31968
31969 if (debug_info_level >= DINFO_LEVEL_TERSE)
31970 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
31971
31972 if (have_macinfo)
31973 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31974 macinfo_section_label);
31975
31976 save_macinfo_strings ();
31977
31978 if (dwarf_split_debug_info)
31979 {
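      /* Assign indexes to the strings so that they can be referred to
	 by index in the split DWARF output.  */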
31980 unsigned int index = 0;
31981 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31982 }
31983
31984 /* Output all of the compilation units. We put the main one last so that
31985 the offsets are available to output_pubnames. */
31986 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31987 output_comp_unit (node->die, 0, NULL);
31988
31989 hash_table<comdat_type_hasher> comdat_type_table (100);
31990 for (comdat_type_node *ctnode = comdat_type_list;
31991 ctnode != NULL; ctnode = ctnode->next)
31992 {
31993 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31994
31995 /* Don't output duplicate types. */
31996 if (*slot != HTAB_EMPTY_ENTRY)
31997 continue;
31998
31999 /* Add a pointer to the line table for the main compilation unit
32000 so that the debugger can make sense of DW_AT_decl_file
32001 attributes. */
32002 if (debug_info_level >= DINFO_LEVEL_TERSE)
32003 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32004 (!dwarf_split_debug_info
32005 ? debug_line_section_label
32006 : debug_skeleton_line_section_label));
32007
32008 output_comdat_type_unit (ctnode);
32009 *slot = ctnode;
32010 }
32011
32012 /* Stick a unique symbol to the main debuginfo section. */
32013 compute_comp_unit_symbol (comp_unit_die ());
32014
32015 /* Output the main compilation unit. We always need it if only for
32016 the CU symbol. */
32017 output_comp_unit (comp_unit_die (), true, NULL);
32018
32019 /* Output the abbreviation table. */
32020 if (vec_safe_length (abbrev_die_table) != 1)
32021 {
32022 switch_to_section (debug_abbrev_section);
32023 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32024 output_abbrev_section ();
32025 }
32026
32027 /* Have to end the macro section. */
32028 if (have_macinfo)
32029 {
32030 /* We have to save macinfo state if we need to output it again
32031 for the FAT part of the object. */
32032 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32033 if (flag_fat_lto_objects)
32034 macinfo_table = macinfo_table->copy ();
32035
32036 switch_to_section (debug_macinfo_section);
32037 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32038 output_macinfo (debug_line_section_label, true);
32039 dw2_asm_output_data (1, 0, "End compilation unit");
32040
32041 if (flag_fat_lto_objects)
32042 {
32043 vec_free (macinfo_table);
32044 macinfo_table = saved_macinfo_table;
32045 }
32046 }
32047
32048 /* Emit a skeleton debug_line section. */
32049 switch_to_section (debug_line_section);
32050 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32051 output_line_info (true);
32052
32053 /* If we emitted any indirect strings, output the string table too. */
32054 if (debug_str_hash || skeleton_debug_str_hash)
32055 output_indirect_strings ();
32056
32057 /* Switch back to the text section. */
32058 switch_to_section (text_section);
32059 }
32060
32061 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32062 within the same process. For use by toplev::finalize. */
32063
32064 void
32065 dwarf2out_c_finalize (void)
32066 {
32067 last_var_location_insn = NULL;
32068 cached_next_real_insn = NULL;
32069 used_rtx_array = NULL;
32070 incomplete_types = NULL;
32071 debug_info_section = NULL;
32072 debug_skeleton_info_section = NULL;
32073 debug_abbrev_section = NULL;
32074 debug_skeleton_abbrev_section = NULL;
32075 debug_aranges_section = NULL;
32076 debug_addr_section = NULL;
32077 debug_macinfo_section = NULL;
32078 debug_line_section = NULL;
32079 debug_skeleton_line_section = NULL;
32080 debug_loc_section = NULL;
32081 debug_pubnames_section = NULL;
32082 debug_pubtypes_section = NULL;
32083 debug_str_section = NULL;
32084 debug_line_str_section = NULL;
32085 debug_str_dwo_section = NULL;
32086 debug_str_offsets_section = NULL;
32087 debug_ranges_section = NULL;
32088 debug_frame_section = NULL;
32089 fde_vec = NULL;
32090 debug_str_hash = NULL;
32091 debug_line_str_hash = NULL;
32092 skeleton_debug_str_hash = NULL;
32093 dw2_string_counter = 0;
32094 have_multiple_function_sections = false;
32095 text_section_used = false;
32096 cold_text_section_used = false;
32097 cold_text_section = NULL;
32098 current_unit_personality = NULL;
32099
32100 early_dwarf = false;
32101 early_dwarf_finished = false;
32102
32103 next_die_offset = 0;
32104 single_comp_unit_die = NULL;
32105 comdat_type_list = NULL;
32106 limbo_die_list = NULL;
32107 file_table = NULL;
32108 decl_die_table = NULL;
32109 common_block_die_table = NULL;
32110 decl_loc_table = NULL;
32111 call_arg_locations = NULL;
32112 call_arg_loc_last = NULL;
32113 call_site_count = -1;
32114 tail_call_site_count = -1;
32115 cached_dw_loc_list_table = NULL;
32116 abbrev_die_table = NULL;
32117 delete dwarf_proc_stack_usage_map;
32118 dwarf_proc_stack_usage_map = NULL;
32119 line_info_label_num = 0;
32120 cur_line_info_table = NULL;
32121 text_section_line_info = NULL;
32122 cold_text_section_line_info = NULL;
32123 separate_line_info = NULL;
32124 info_section_emitted = false;
32125 pubname_table = NULL;
32126 pubtype_table = NULL;
32127 macinfo_table = NULL;
32128 ranges_table = NULL;
32129 ranges_by_label = NULL;
32130 rnglist_idx = 0;
32131 have_location_lists = false;
32132 loclabel_num = 0;
32133 poc_label_num = 0;
32134 last_emitted_file = NULL;
32135 label_num = 0;
32136 tmpl_value_parm_die_table = NULL;
32137 generic_type_instances = NULL;
32138 frame_pointer_fb_offset = 0;
32139 frame_pointer_fb_offset_valid = false;
32140 base_types.release ();
32141 XDELETEVEC (producer_string);
32142 producer_string = NULL;
32143 }
32144
32145 #include "gt-dwarf2out.h"