DWARF: Use DW_OP_addrx and DW_OP_constx for DWARF5.
[gcc.git] / gcc / dwarf2out.c
1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
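
/* As a concrete, illustrative example (x86-64, not taken from this file):
   after the prologue

     push %rbp
     mov  %rsp, %rbp

   the CFA is rsp + 16 following the push (the return address and the saved
   rbp both sit below it), and the corresponding CFIs are roughly
   DW_CFA_def_cfa_offset 16, DW_CFA_offset for register 6 (rbp, saved at
   CFA - 16), and DW_CFA_def_cfa_register 6, after which the CFA is computed
   from rbp for the rest of the function.  */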
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105
106 #ifndef XCOFF_DEBUGGING_INFO
107 #define XCOFF_DEBUGGING_INFO 0
108 #endif
109
110 #ifndef HAVE_XCOFF_DWARF_EXTRAS
111 #define HAVE_XCOFF_DWARF_EXTRAS 0
112 #endif
113
114 #ifdef VMS_DEBUGGING_INFO
115 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
116
117 /* Define this macro to be a nonzero value if the directory specifications
118 which are output in the debug info should end with a separator. */
119 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
120 /* Define this macro to evaluate to a nonzero value if GCC should refrain
121 from generating indirect strings in DWARF2 debug information, for instance
122 if your target is stuck with an old version of GDB that is unable to
123 process them properly or uses VMS Debug. */
124 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
125 #else
126 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
127 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
128 #endif
129
130 /* ??? Poison these here until it can be done generically. They've been
131 totally replaced in this file; make sure it stays that way. */
132 #undef DWARF2_UNWIND_INFO
133 #undef DWARF2_FRAME_INFO
134 #if (GCC_VERSION >= 3000)
135 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
136 #endif
137
138 /* The size of the target's pointer type. */
139 #ifndef PTR_SIZE
140 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
141 #endif
142
143 /* Array of RTXes referenced by the debugging information, which therefore
144 must be kept around forever. */
145 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
146
147 /* A pointer to the base of a list of incomplete types which might be
148 completed at some later time. incomplete_types needs to be a
149 vec<tree, va_gc> * because we want to tell the garbage collector about
150 it. */
151 static GTY(()) vec<tree, va_gc> *incomplete_types;
152
153 /* A pointer to the base of a table of references to declaration
154 scopes. This table is a display which tracks the nesting
155 of declaration scopes at the current scope and containing
156 scopes. This table is used to find the proper place to
157 define type declaration DIE's. */
158 static GTY(()) vec<tree, va_gc> *decl_scope_table;
159
160 /* Pointers to various DWARF2 sections. */
161 static GTY(()) section *debug_info_section;
162 static GTY(()) section *debug_skeleton_info_section;
163 static GTY(()) section *debug_abbrev_section;
164 static GTY(()) section *debug_skeleton_abbrev_section;
165 static GTY(()) section *debug_aranges_section;
166 static GTY(()) section *debug_addr_section;
167 static GTY(()) section *debug_macinfo_section;
168 static const char *debug_macinfo_section_name;
169 static unsigned macinfo_label_base = 1;
170 static GTY(()) section *debug_line_section;
171 static GTY(()) section *debug_skeleton_line_section;
172 static GTY(()) section *debug_loc_section;
173 static GTY(()) section *debug_pubnames_section;
174 static GTY(()) section *debug_pubtypes_section;
175 static GTY(()) section *debug_str_section;
176 static GTY(()) section *debug_line_str_section;
177 static GTY(()) section *debug_str_dwo_section;
178 static GTY(()) section *debug_str_offsets_section;
179 static GTY(()) section *debug_ranges_section;
180 static GTY(()) section *debug_frame_section;
181
182 /* Maximum size (in bytes) of an artificially generated label. */
183 #define MAX_ARTIFICIAL_LABEL_BYTES 40
184
185 /* According to the (draft) DWARF 3 specification, the initial length
186 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
187 bytes are 0xffffffff, followed by the length stored in the next 8
188 bytes.
189
190 However, the SGI/MIPS ABI uses an initial length which is equal to
191 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
192
193 #ifndef DWARF_INITIAL_LENGTH_SIZE
194 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
195 #endif
196
197 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
198 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
199 #endif
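
/* For instance, an initial length of 20 (0x14) is emitted as the four bytes
   00 00 00 14 in 32-bit DWARF, while 64-bit DWARF emits the escape
   ff ff ff ff followed by the same value in eight bytes (illustrative
   values, shown big-endian for readability).  */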
200
201 /* Round SIZE up to the nearest BOUNDARY. */
202 #define DWARF_ROUND(SIZE,BOUNDARY) \
203 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
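
/* E.g. DWARF_ROUND (13, 8) is ((13 + 7) / 8) * 8 == 16, and a SIZE that is
   already a multiple of BOUNDARY is left unchanged.  */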
204
205 /* CIE identifier. */
206 #if HOST_BITS_PER_WIDE_INT >= 64
207 #define DWARF_CIE_ID \
208 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
209 #else
210 #define DWARF_CIE_ID DW_CIE_ID
211 #endif
212
213
214 /* A vector for a table that contains frame description
215 information for each routine. */
216 #define NOT_INDEXED (-1U)
217 #define NO_INDEX_ASSIGNED (-2U)
218
219 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
220
221 struct GTY((for_user)) indirect_string_node {
222 const char *str;
223 unsigned int refcount;
224 enum dwarf_form form;
225 char *label;
226 unsigned int index;
227 };
228
229 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
230 {
231 typedef const char *compare_type;
232
233 static hashval_t hash (indirect_string_node *);
234 static bool equal (indirect_string_node *, const char *);
235 };
236
237 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
238
239 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
240
241 /* With split_debug_info, both the comp_dir and dwo_name go in the
242 main object file, rather than the dwo, similar to the force_direct
243 parameter elsewhere but with additional complications:
244
245 1) The string is needed in both the main object file and the dwo.
246 That is, the comp_dir and dwo_name will appear in both places.
247
248 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
249 DW_FORM_line_strp or DW_FORM_GNU_str_index.
250
251 3) GCC chooses the form to use late, depending on the size and
252 reference count.
253
254 Rather than forcing all the debug string handling functions and
255 callers to deal with these complications, simply use a separate,
256 special-cased string table for any attribute that should go in the
257 main object file. This limits the complexity to just the places
258 that need it. */
259
260 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
261
262 static GTY(()) int dw2_string_counter;
263
264 /* True if the compilation unit places functions in more than one section. */
265 static GTY(()) bool have_multiple_function_sections = false;
266
267 /* Whether the default text and cold text sections have been used at all. */
268 static GTY(()) bool text_section_used = false;
269 static GTY(()) bool cold_text_section_used = false;
270
271 /* The default cold text section. */
272 static GTY(()) section *cold_text_section;
273
274 /* The DIE for C++14 'auto' in a function return type. */
275 static GTY(()) dw_die_ref auto_die;
276
277 /* The DIE for C++14 'decltype(auto)' in a function return type. */
278 static GTY(()) dw_die_ref decltype_auto_die;
279
280 /* Forward declarations for functions defined in this file. */
281
282 static void output_call_frame_info (int);
283 static void dwarf2out_note_section_used (void);
284
285 /* Personality decl of current unit. Used only when assembler does not support
286 personality CFI. */
287 static GTY(()) rtx current_unit_personality;
288
289 /* Whether an eh_frame section is required. */
290 static GTY(()) bool do_eh_frame = false;
291
292 /* .debug_rnglists next index. */
293 static unsigned int rnglist_idx;
294
295 /* Data and reference forms for relocatable data. */
296 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
297 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
298
299 #ifndef DEBUG_FRAME_SECTION
300 #define DEBUG_FRAME_SECTION ".debug_frame"
301 #endif
302
303 #ifndef FUNC_BEGIN_LABEL
304 #define FUNC_BEGIN_LABEL "LFB"
305 #endif
306
307 #ifndef FUNC_END_LABEL
308 #define FUNC_END_LABEL "LFE"
309 #endif
310
311 #ifndef PROLOGUE_END_LABEL
312 #define PROLOGUE_END_LABEL "LPE"
313 #endif
314
315 #ifndef EPILOGUE_BEGIN_LABEL
316 #define EPILOGUE_BEGIN_LABEL "LEB"
317 #endif
318
319 #ifndef FRAME_BEGIN_LABEL
320 #define FRAME_BEGIN_LABEL "Lframe"
321 #endif
322 #define CIE_AFTER_SIZE_LABEL "LSCIE"
323 #define CIE_END_LABEL "LECIE"
324 #define FDE_LABEL "LSFDE"
325 #define FDE_AFTER_SIZE_LABEL "LASFDE"
326 #define FDE_END_LABEL "LEFDE"
327 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
328 #define LINE_NUMBER_END_LABEL "LELT"
329 #define LN_PROLOG_AS_LABEL "LASLTP"
330 #define LN_PROLOG_END_LABEL "LELTP"
331 #define DIE_LABEL_PREFIX "DW"
332 \f
333 /* Match the base name of a file to the base name of a compilation unit. */
334
335 static int
336 matches_main_base (const char *path)
337 {
338 /* Cache the last query. */
339 static const char *last_path = NULL;
340 static int last_match = 0;
341 if (path != last_path)
342 {
343 const char *base;
344 int length = base_of_path (path, &base);
345 last_path = path;
346 last_match = (length == main_input_baselength
347 && memcmp (base, main_input_basename, length) == 0);
348 }
349 return last_match;
350 }
351
352 #ifdef DEBUG_DEBUG_STRUCT
353
354 static int
355 dump_struct_debug (tree type, enum debug_info_usage usage,
356 enum debug_struct_file criterion, int generic,
357 int matches, int result)
358 {
359 /* Find the type name. */
360 tree type_decl = TYPE_STUB_DECL (type);
361 tree t = type_decl;
362 const char *name = 0;
363 if (TREE_CODE (t) == TYPE_DECL)
364 t = DECL_NAME (t);
365 if (t)
366 name = IDENTIFIER_POINTER (t);
367
368 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
369 criterion,
370 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
371 matches ? "bas" : "hdr",
372 generic ? "gen" : "ord",
373 usage == DINFO_USAGE_DFN ? ";" :
374 usage == DINFO_USAGE_DIR_USE ? "." : "*",
375 result,
376 (void*) type_decl, name);
377 return result;
378 }
379 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
380 dump_struct_debug (type, usage, criterion, generic, matches, result)
381
382 #else
383
384 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
385 (result)
386
387 #endif
388
389 /* Get the number of HOST_WIDE_INTs needed to represent the precision
390 of the number. Some constants have a large uniform precision, so
391 we get the precision needed for the actual value of the number. */
392
393 static unsigned int
394 get_full_len (const wide_int &op)
395 {
396 int prec = wi::min_precision (op, UNSIGNED);
397 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
398 / HOST_BITS_PER_WIDE_INT);
399 }
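
/* For example, with 64-bit HOST_WIDE_INTs a value whose minimum precision
   is 70 bits needs (70 + 63) / 64 == 2 HOST_WIDE_INTs (illustrative).  */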
400
401 static bool
402 should_emit_struct_debug (tree type, enum debug_info_usage usage)
403 {
404 enum debug_struct_file criterion;
405 tree type_decl;
406 bool generic = lang_hooks.types.generic_p (type);
407
408 if (generic)
409 criterion = debug_struct_generic[usage];
410 else
411 criterion = debug_struct_ordinary[usage];
412
413 if (criterion == DINFO_STRUCT_FILE_NONE)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
415 if (criterion == DINFO_STRUCT_FILE_ANY)
416 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
417
418 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
419
420 if (type_decl != NULL)
421 {
422 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
423 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
424
425 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
426 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
427 }
428
429 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
430 }
431 \f
432 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
433 switch to the data section instead, and write out a synthetic start label
434 for collect2 the first time around. */
435
436 static void
437 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
438 {
439 if (eh_frame_section == 0)
440 {
441 int flags;
442
443 if (EH_TABLES_CAN_BE_READ_ONLY)
444 {
445 int fde_encoding;
446 int per_encoding;
447 int lsda_encoding;
448
449 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
450 /*global=*/0);
451 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
452 /*global=*/1);
453 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
454 /*global=*/0);
455 flags = ((! flag_pic
456 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
457 && (fde_encoding & 0x70) != DW_EH_PE_aligned
458 && (per_encoding & 0x70) != DW_EH_PE_absptr
459 && (per_encoding & 0x70) != DW_EH_PE_aligned
460 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
461 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
462 ? 0 : SECTION_WRITE);
463 }
464 else
465 flags = SECTION_WRITE;
466
467 #ifdef EH_FRAME_SECTION_NAME
468 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
469 #else
470 eh_frame_section = ((flags == SECTION_WRITE)
471 ? data_section : readonly_data_section);
472 #endif /* EH_FRAME_SECTION_NAME */
473 }
474
475 switch_to_section (eh_frame_section);
476
477 #ifdef EH_FRAME_THROUGH_COLLECT2
478 /* We have no special eh_frame section. Emit special labels to guide
479 collect2. */
480 if (!back)
481 {
482 tree label = get_file_function_name ("F");
483 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
484 targetm.asm_out.globalize_label (asm_out_file,
485 IDENTIFIER_POINTER (label));
486 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
487 }
488 #endif
489 }
490
491 /* Switch [BACK] to the eh or debug frame table section, depending on
492 FOR_EH. */
493
494 static void
495 switch_to_frame_table_section (int for_eh, bool back)
496 {
497 if (for_eh)
498 switch_to_eh_frame_section (back);
499 else
500 {
501 if (!debug_frame_section)
502 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
503 SECTION_DEBUG, NULL);
504 switch_to_section (debug_frame_section);
505 }
506 }
507
508 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
509
510 enum dw_cfi_oprnd_type
511 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
512 {
513 switch (cfi)
514 {
515 case DW_CFA_nop:
516 case DW_CFA_GNU_window_save:
517 case DW_CFA_remember_state:
518 case DW_CFA_restore_state:
519 return dw_cfi_oprnd_unused;
520
521 case DW_CFA_set_loc:
522 case DW_CFA_advance_loc1:
523 case DW_CFA_advance_loc2:
524 case DW_CFA_advance_loc4:
525 case DW_CFA_MIPS_advance_loc8:
526 return dw_cfi_oprnd_addr;
527
528 case DW_CFA_offset:
529 case DW_CFA_offset_extended:
530 case DW_CFA_def_cfa:
531 case DW_CFA_offset_extended_sf:
532 case DW_CFA_def_cfa_sf:
533 case DW_CFA_restore:
534 case DW_CFA_restore_extended:
535 case DW_CFA_undefined:
536 case DW_CFA_same_value:
537 case DW_CFA_def_cfa_register:
538 case DW_CFA_register:
539 case DW_CFA_expression:
540 case DW_CFA_val_expression:
541 return dw_cfi_oprnd_reg_num;
542
543 case DW_CFA_def_cfa_offset:
544 case DW_CFA_GNU_args_size:
545 case DW_CFA_def_cfa_offset_sf:
546 return dw_cfi_oprnd_offset;
547
548 case DW_CFA_def_cfa_expression:
549 return dw_cfi_oprnd_loc;
550
551 default:
552 gcc_unreachable ();
553 }
554 }
555
556 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
557
558 enum dw_cfi_oprnd_type
559 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
560 {
561 switch (cfi)
562 {
563 case DW_CFA_def_cfa:
564 case DW_CFA_def_cfa_sf:
565 case DW_CFA_offset:
566 case DW_CFA_offset_extended_sf:
567 case DW_CFA_offset_extended:
568 return dw_cfi_oprnd_offset;
569
570 case DW_CFA_register:
571 return dw_cfi_oprnd_reg_num;
572
573 case DW_CFA_expression:
574 case DW_CFA_val_expression:
575 return dw_cfi_oprnd_loc;
576
577 case DW_CFA_def_cfa_expression:
578 return dw_cfi_oprnd_cfa_loc;
579
580 default:
581 return dw_cfi_oprnd_unused;
582 }
583 }
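
/* Taken together, the two functions above say, e.g., that a DW_CFA_offset
   instruction carries a register number in operand 1 and a factored offset
   in operand 2: DW_CFA_offset (6, 2) records that register 6 is saved at
   CFA + 2 * data alignment factor (illustrative operands).  */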
584
585 /* Output one FDE. */
586
587 static void
588 output_fde (dw_fde_ref fde, bool for_eh, bool second,
589 char *section_start_label, int fde_encoding, char *augmentation,
590 bool any_lsda_needed, int lsda_encoding)
591 {
592 const char *begin, *end;
593 static unsigned int j;
594 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
595
596 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
597 /* empty */ 0);
598 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
599 for_eh + j);
600 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
601 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
602 if (!XCOFF_DEBUGGING_INFO || for_eh)
603 {
604 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
605 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
606 " indicating 64-bit DWARF extension");
607 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
608 "FDE Length");
609 }
610 ASM_OUTPUT_LABEL (asm_out_file, l1);
611
612 if (for_eh)
613 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
614 else
615 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
616 debug_frame_section, "FDE CIE offset");
617
618 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
619 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
620
621 if (for_eh)
622 {
623 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
624 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
625 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
626 "FDE initial location");
627 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
628 end, begin, "FDE address range");
629 }
630 else
631 {
632 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
633 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
634 }
635
636 if (augmentation[0])
637 {
638 if (any_lsda_needed)
639 {
640 int size = size_of_encoded_value (lsda_encoding);
641
642 if (lsda_encoding == DW_EH_PE_aligned)
643 {
644 int offset = ( 4 /* Length */
645 + 4 /* CIE offset */
646 + 2 * size_of_encoded_value (fde_encoding)
647 + 1 /* Augmentation size */ );
648 int pad = -offset & (PTR_SIZE - 1);
649
650 size += pad;
651 gcc_assert (size_of_uleb128 (size) == 1);
652 }
653
654 dw2_asm_output_data_uleb128 (size, "Augmentation size");
655
656 if (fde->uses_eh_lsda)
657 {
658 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
659 fde->funcdef_number);
660 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
661 gen_rtx_SYMBOL_REF (Pmode, l1),
662 false,
663 "Language Specific Data Area");
664 }
665 else
666 {
667 if (lsda_encoding == DW_EH_PE_aligned)
668 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
669 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
670 "Language Specific Data Area (none)");
671 }
672 }
673 else
674 dw2_asm_output_data_uleb128 (0, "Augmentation size");
675 }
676
677 /* Loop through the Call Frame Instructions associated with this FDE. */
678 fde->dw_fde_current_label = begin;
679 {
680 size_t from, until, i;
681
682 from = 0;
683 until = vec_safe_length (fde->dw_fde_cfi);
684
685 if (fde->dw_fde_second_begin == NULL)
686 ;
687 else if (!second)
688 until = fde->dw_fde_switch_cfi_index;
689 else
690 from = fde->dw_fde_switch_cfi_index;
691
692 for (i = from; i < until; i++)
693 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
694 }
695
696 /* If we are to emit a ref/link from function bodies to their frame tables,
697 do it now. This is typically performed to make sure that tables
698 associated with functions are dragged with them and not discarded by
699 garbage-collecting linkers. We need to do this on a per-function basis to
700 cope with -ffunction-sections. */
701
702 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
703 /* Switch to the function section, emit the ref to the tables, and
704 switch *back* into the table section. */
705 switch_to_section (function_section (fde->decl));
706 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
707 switch_to_frame_table_section (for_eh, true);
708 #endif
709
710 /* Pad the FDE out to an address sized boundary. */
711 ASM_OUTPUT_ALIGN (asm_out_file,
712 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
713 ASM_OUTPUT_LABEL (asm_out_file, l2);
714
715 j += 2;
716 }
717
718 /* Return true if frame description entry FDE is needed for EH. */
719
720 static bool
721 fde_needed_for_eh_p (dw_fde_ref fde)
722 {
723 if (flag_asynchronous_unwind_tables)
724 return true;
725
726 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
727 return true;
728
729 if (fde->uses_eh_lsda)
730 return true;
731
732 /* If exceptions are enabled, we have collected nothrow info. */
733 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
734 return false;
735
736 return true;
737 }
738
739 /* Output the call frame information used to record information
740 that relates to calculating the frame pointer, and records the
741 location of saved registers. */
742
743 static void
744 output_call_frame_info (int for_eh)
745 {
746 unsigned int i;
747 dw_fde_ref fde;
748 dw_cfi_ref cfi;
749 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
750 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
751 bool any_lsda_needed = false;
752 char augmentation[6];
753 int augmentation_size;
754 int fde_encoding = DW_EH_PE_absptr;
755 int per_encoding = DW_EH_PE_absptr;
756 int lsda_encoding = DW_EH_PE_absptr;
757 int return_reg;
758 rtx personality = NULL;
759 int dw_cie_version;
760
761 /* Don't emit a CIE if there won't be any FDEs. */
762 if (!fde_vec)
763 return;
764
765 /* Nothing to do if the assembler's doing it all. */
766 if (dwarf2out_do_cfi_asm ())
767 return;
768
769 /* If we don't have any functions we'll want to unwind out of, don't emit
770 any EH unwind information. If we make FDEs linkonce, we may have to
771 emit an empty label for an FDE that wouldn't otherwise be emitted. We
772 want to avoid having an FDE kept around when the function it refers to
773 is discarded. Example where this matters: a primary function template
774 in C++ requires EH information, an explicit specialization doesn't. */
775 if (for_eh)
776 {
777 bool any_eh_needed = false;
778
779 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
780 {
781 if (fde->uses_eh_lsda)
782 any_eh_needed = any_lsda_needed = true;
783 else if (fde_needed_for_eh_p (fde))
784 any_eh_needed = true;
785 else if (TARGET_USES_WEAK_UNWIND_INFO)
786 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
787 }
788
789 if (!any_eh_needed)
790 return;
791 }
792
793 /* We're going to be generating comments, so turn on app. */
794 if (flag_debug_asm)
795 app_enable ();
796
797 /* Switch to the proper frame section, first time. */
798 switch_to_frame_table_section (for_eh, false);
799
800 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
801 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
802
803 /* Output the CIE. */
804 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
805 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
806 if (!XCOFF_DEBUGGING_INFO || for_eh)
807 {
808 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
809 dw2_asm_output_data (4, 0xffffffff,
810 "Initial length escape value indicating 64-bit DWARF extension");
811 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
812 "Length of Common Information Entry");
813 }
814 ASM_OUTPUT_LABEL (asm_out_file, l1);
815
816 /* Now that the CIE pointer is PC-relative for EH,
817 use 0 to identify the CIE. */
818 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
819 (for_eh ? 0 : DWARF_CIE_ID),
820 "CIE Identifier Tag");
821
822 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
823 use CIE version 1, unless that would produce incorrect results
824 due to overflowing the return register column. */
825 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
826 dw_cie_version = 1;
827 if (return_reg >= 256 || dwarf_version > 2)
828 dw_cie_version = 3;
829 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
830
831 augmentation[0] = 0;
832 augmentation_size = 0;
833
834 personality = current_unit_personality;
835 if (for_eh)
836 {
837 char *p;
838
839 /* Augmentation:
840 z Indicates that a uleb128 is present to size the
841 augmentation section.
842 L Indicates the encoding (and thus presence) of
843 an LSDA pointer in the FDE augmentation.
844 R Indicates a non-default pointer encoding for
845 FDE code pointers.
846 P Indicates the presence of an encoding + language
847 personality routine in the CIE augmentation. */
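
/* E.g. a C++ function with a personality routine, an LSDA and a non-default
   FDE encoding gets the augmentation string "zPLR"; the CIE augmentation
   data is then its uleb128 size followed by the personality encoding and
   pointer (P), the LSDA encoding byte (L) and the FDE pointer encoding
   byte (R), in that order (sketch of the typical .eh_frame layout).  */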
848
849 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
850 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
851 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
852
853 p = augmentation + 1;
854 if (personality)
855 {
856 *p++ = 'P';
857 augmentation_size += 1 + size_of_encoded_value (per_encoding);
858 assemble_external_libcall (personality);
859 }
860 if (any_lsda_needed)
861 {
862 *p++ = 'L';
863 augmentation_size += 1;
864 }
865 if (fde_encoding != DW_EH_PE_absptr)
866 {
867 *p++ = 'R';
868 augmentation_size += 1;
869 }
870 if (p > augmentation + 1)
871 {
872 augmentation[0] = 'z';
873 *p = '\0';
874 }
875
876 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
877 if (personality && per_encoding == DW_EH_PE_aligned)
878 {
879 int offset = ( 4 /* Length */
880 + 4 /* CIE Id */
881 + 1 /* CIE version */
882 + strlen (augmentation) + 1 /* Augmentation */
883 + size_of_uleb128 (1) /* Code alignment */
884 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
885 + 1 /* RA column */
886 + 1 /* Augmentation size */
887 + 1 /* Personality encoding */ );
888 int pad = -offset & (PTR_SIZE - 1);
889
890 augmentation_size += pad;
891
892 /* Augmentations should be small, so there's scarce need to
893 iterate for a solution. Die if we exceed one uleb128 byte. */
894 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
895 }
896 }
897
898 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
899 if (dw_cie_version >= 4)
900 {
901 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
902 dw2_asm_output_data (1, 0, "CIE Segment Size");
903 }
904 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
905 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
906 "CIE Data Alignment Factor");
907
908 if (dw_cie_version == 1)
909 dw2_asm_output_data (1, return_reg, "CIE RA Column");
910 else
911 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
912
913 if (augmentation[0])
914 {
915 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
916 if (personality)
917 {
918 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
919 eh_data_format_name (per_encoding));
920 dw2_asm_output_encoded_addr_rtx (per_encoding,
921 personality,
922 true, NULL);
923 }
924
925 if (any_lsda_needed)
926 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
927 eh_data_format_name (lsda_encoding));
928
929 if (fde_encoding != DW_EH_PE_absptr)
930 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
931 eh_data_format_name (fde_encoding));
932 }
933
934 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
935 output_cfi (cfi, NULL, for_eh);
936
937 /* Pad the CIE out to an address sized boundary. */
938 ASM_OUTPUT_ALIGN (asm_out_file,
939 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
940 ASM_OUTPUT_LABEL (asm_out_file, l2);
941
942 /* Loop through all of the FDE's. */
943 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
944 {
945 unsigned int k;
946
947 /* Don't emit EH unwind info for leaf functions that don't need it. */
948 if (for_eh && !fde_needed_for_eh_p (fde))
949 continue;
950
951 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
952 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
953 augmentation, any_lsda_needed, lsda_encoding);
954 }
955
956 if (for_eh && targetm.terminate_dw2_eh_frame_info)
957 dw2_asm_output_data (4, 0, "End of Table");
958
959 /* Turn off app to make assembly quicker. */
960 if (flag_debug_asm)
961 app_disable ();
962 }
963
964 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
965
966 static void
967 dwarf2out_do_cfi_startproc (bool second)
968 {
969 int enc;
970 rtx ref;
971
972 fprintf (asm_out_file, "\t.cfi_startproc\n");
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
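
/* On a typical x86-64 ELF target with C++ exceptions this emits something
   like (illustrative output, label and symbol names are examples only):

     .cfi_startproc
     .cfi_personality 0x9b,DW.ref.__gxx_personality_v0
     .cfi_lsda 0x1b,.LLSDA0

   where 0x9b is DW_EH_PE_indirect|pcrel|sdata4 and 0x1b is pcrel|sdata4.  */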
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a current personality per function as without CFI
1114 asm, at the point where we emit the CFI data, there is no current
1115 function anymore. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting .cfi_personality directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132 /* Output a label to mark the end of the code generated for this
1133 function's prologue. */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142 for a function epilogue. This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155 /* Output a label to mark the beginning of the code generated for this
1156 function's epilogue. */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 section *sect;
1223 dw_fde_ref fde = cfun->fde;
1224
1225 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1226
1227 if (!in_cold_section_p)
1228 {
1229 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1230 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1231 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1232 }
1233 else
1234 {
1235 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1236 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1237 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1238 }
1239 have_multiple_function_sections = true;
1240
1241 /* There is no need to mark used sections when not debugging. */
1242 if (cold_text_section != NULL)
1243 dwarf2out_note_section_used ();
1244
1245 if (dwarf2out_do_cfi_asm ())
1246 fprintf (asm_out_file, "\t.cfi_endproc\n");
1247
1248 /* Now do the real section switch. */
1249 sect = current_function_section ();
1250 switch_to_section (sect);
1251
1252 fde->second_in_std_section
1253 = (sect == text_section
1254 || (cold_text_section && sect == cold_text_section));
1255
1256 if (dwarf2out_do_cfi_asm ())
1257 dwarf2out_do_cfi_startproc (true);
1258
1259 var_location_switch_text_section ();
1260
1261 if (cold_text_section != NULL)
1262 set_cur_line_info_table (sect);
1263 }
1264 \f
1265 /* And now, the subset of the debugging information support code necessary
1266 for emitting location expressions. */
1267
1268 /* Data about a single source file. */
1269 struct GTY((for_user)) dwarf_file_data {
1270 const char * filename;
1271 int emitted_number;
1272 };
1273
1274 /* Describe an entry into the .debug_addr section. */
1275
1276 enum ate_kind {
1277 ate_kind_rtx,
1278 ate_kind_rtx_dtprel,
1279 ate_kind_label
1280 };
1281
1282 struct GTY((for_user)) addr_table_entry {
1283 enum ate_kind kind;
1284 unsigned int refcount;
1285 unsigned int index;
1286 union addr_table_entry_struct_union
1287 {
1288 rtx GTY ((tag ("0"))) rtl;
1289 char * GTY ((tag ("1"))) label;
1290 }
1291 GTY ((desc ("%1.kind"))) addr;
1292 };
1293
1294 typedef unsigned int var_loc_view;
1295
1296 /* Location lists are ranges + location descriptions for that range,
1297 so you can track variables that are in different places over
1298 their entire life. */
1299 typedef struct GTY(()) dw_loc_list_struct {
1300 dw_loc_list_ref dw_loc_next;
1301 const char *begin; /* Label and addr_entry for start of range */
1302 addr_table_entry *begin_entry;
1303 const char *end; /* Label for end of range */
1304 char *ll_symbol; /* Label for beginning of location list.
1305 Only on head of list. */
1306 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1307 const char *section; /* Section this loclist is relative to */
1308 dw_loc_descr_ref expr;
1309 var_loc_view vbegin, vend;
1310 hashval_t hash;
1311 /* True if all addresses in this and subsequent lists are known to be
1312 resolved. */
1313 bool resolved_addr;
1314 /* True if this list has been replaced by dw_loc_next. */
1315 bool replaced;
1316 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1317 section. */
1318 unsigned char emitted : 1;
1319 /* True if hash field is index rather than hash value. */
1320 unsigned char num_assigned : 1;
1321 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1322 unsigned char offset_emitted : 1;
1323 /* True if note_variable_value_in_expr has been called on it. */
1324 unsigned char noted_variable_value : 1;
1325 /* True if the range should be emitted even if begin and end
1326 are the same. */
1327 bool force;
1328 } dw_loc_list_node;
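
/* For example (illustrative), a variable that starts out in a register and
   is later spilled to the stack might be described by two nodes:

     [.LVL0, .LVL1)  DW_OP_reg5
     [.LVL1, .LVL2)  DW_OP_fbreg -16

   each node holding the begin/end labels of its range and the location
   expression that is valid over that range.  */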
1329
1330 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1331 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1332
1333 /* Convert a DWARF stack opcode into its string name. */
1334
1335 static const char *
1336 dwarf_stack_op_name (unsigned int op)
1337 {
1338 const char *name = get_DW_OP_name (op);
1339
1340 if (name != NULL)
1341 return name;
1342
1343 return "OP_<unknown>";
1344 }
1345
1346 /* Return TRUE iff we're to output location view lists as a separate
1347 attribute next to the location lists, as an extension compatible
1348 with DWARF 2 and above. */
1349
1350 static inline bool
1351 dwarf2out_locviews_in_attribute ()
1352 {
1353 return debug_variable_location_views == 1;
1354 }
1355
1356 /* Return TRUE iff we're to output location view lists as part of the
1357 location lists, as proposed for standardization after DWARF 5. */
1358
1359 static inline bool
1360 dwarf2out_locviews_in_loclist ()
1361 {
1362 #ifndef DW_LLE_view_pair
1363 return false;
1364 #else
1365 return debug_variable_location_views == -1;
1366 #endif
1367 }
1368
1369 /* Return a pointer to a newly allocated location description. Location
1370 descriptions are simple expression terms that can be strung
1371 together to form more complicated location (address) descriptions. */
1372
1373 static inline dw_loc_descr_ref
1374 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1375 unsigned HOST_WIDE_INT oprnd2)
1376 {
1377 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1378
1379 descr->dw_loc_opc = op;
1380 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1381 descr->dw_loc_oprnd1.val_entry = NULL;
1382 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1383 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd2.val_entry = NULL;
1385 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1386
1387 return descr;
1388 }
1389
1390 /* Add a location description term to a location description expression. */
1391
1392 static inline void
1393 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1394 {
1395 dw_loc_descr_ref *d;
1396
1397 /* Find the end of the chain. */
1398 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1399 ;
1400
1401 *d = descr;
1402 }
1403
1404 /* Compare two location operands for exact equality. */
1405
1406 static bool
1407 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1408 {
1409 if (a->val_class != b->val_class)
1410 return false;
1411 switch (a->val_class)
1412 {
1413 case dw_val_class_none:
1414 return true;
1415 case dw_val_class_addr:
1416 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1417
1418 case dw_val_class_offset:
1419 case dw_val_class_unsigned_const:
1420 case dw_val_class_const:
1421 case dw_val_class_unsigned_const_implicit:
1422 case dw_val_class_const_implicit:
1423 case dw_val_class_range_list:
1424 /* These are all HOST_WIDE_INT, signed or unsigned. */
1425 return a->v.val_unsigned == b->v.val_unsigned;
1426
1427 case dw_val_class_loc:
1428 return a->v.val_loc == b->v.val_loc;
1429 case dw_val_class_loc_list:
1430 return a->v.val_loc_list == b->v.val_loc_list;
1431 case dw_val_class_view_list:
1432 return a->v.val_view_list == b->v.val_view_list;
1433 case dw_val_class_die_ref:
1434 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1435 case dw_val_class_fde_ref:
1436 return a->v.val_fde_index == b->v.val_fde_index;
1437 case dw_val_class_symview:
1438 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1439 case dw_val_class_lbl_id:
1440 case dw_val_class_lineptr:
1441 case dw_val_class_macptr:
1442 case dw_val_class_loclistsptr:
1443 case dw_val_class_high_pc:
1444 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1445 case dw_val_class_str:
1446 return a->v.val_str == b->v.val_str;
1447 case dw_val_class_flag:
1448 return a->v.val_flag == b->v.val_flag;
1449 case dw_val_class_file:
1450 case dw_val_class_file_implicit:
1451 return a->v.val_file == b->v.val_file;
1452 case dw_val_class_decl_ref:
1453 return a->v.val_decl_ref == b->v.val_decl_ref;
1454
1455 case dw_val_class_const_double:
1456 return (a->v.val_double.high == b->v.val_double.high
1457 && a->v.val_double.low == b->v.val_double.low);
1458
1459 case dw_val_class_wide_int:
1460 return *a->v.val_wide == *b->v.val_wide;
1461
1462 case dw_val_class_vec:
1463 {
1464 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1465 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1466
1467 return (a_len == b_len
1468 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1469 }
1470
1471 case dw_val_class_data8:
1472 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1473
1474 case dw_val_class_vms_delta:
1475 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1476 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1477
1478 case dw_val_class_discr_value:
1479 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1480 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1481 case dw_val_class_discr_list:
1482 /* It makes no sense comparing two discriminant value lists. */
1483 return false;
1484 }
1485 gcc_unreachable ();
1486 }
1487
1488 /* Compare two location atoms for exact equality. */
1489
1490 static bool
1491 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1492 {
1493 if (a->dw_loc_opc != b->dw_loc_opc)
1494 return false;
1495
1496 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1497 address size, but since we always allocate cleared storage it
1498 should be zero for other types of locations. */
1499 if (a->dtprel != b->dtprel)
1500 return false;
1501
1502 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1503 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1504 }
1505
1506 /* Compare two complete location expressions for exact equality. */
1507
1508 bool
1509 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1510 {
1511 while (1)
1512 {
1513 if (a == b)
1514 return true;
1515 if (a == NULL || b == NULL)
1516 return false;
1517 if (!loc_descr_equal_p_1 (a, b))
1518 return false;
1519
1520 a = a->dw_loc_next;
1521 b = b->dw_loc_next;
1522 }
1523 }
1524
1525
1526 /* Add a constant POLY_OFFSET to a location expression. */
1527
1528 static void
1529 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1530 {
1531 dw_loc_descr_ref loc;
1532 HOST_WIDE_INT *p;
1533
1534 gcc_assert (*list_head != NULL);
1535
1536 if (known_eq (poly_offset, 0))
1537 return;
1538
1539 /* Find the end of the chain. */
1540 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1541 ;
1542
1543 HOST_WIDE_INT offset;
1544 if (!poly_offset.is_constant (&offset))
1545 {
1546 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1547 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1548 return;
1549 }
1550
1551 p = NULL;
1552 if (loc->dw_loc_opc == DW_OP_fbreg
1553 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1554 p = &loc->dw_loc_oprnd1.v.val_int;
1555 else if (loc->dw_loc_opc == DW_OP_bregx)
1556 p = &loc->dw_loc_oprnd2.v.val_int;
1557
1558 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1559 offset. Don't optimize if a signed integer overflow would happen.
1560 if (p != NULL
1561 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1562 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1563 *p += offset;
1564
1565 else if (offset > 0)
1566 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1567
1568 else
1569 {
1570 loc->dw_loc_next
1571 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1572 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1573 }
1574 }
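
/* Illustrative effects of loc_descr_plus_const on an expression whose last
   operation is shown on the left (hypothetical operands):

     DW_OP_fbreg -24,  offset +8  =>  DW_OP_fbreg -16 (folded in place)
     DW_OP_deref,      offset +8  =>  ... DW_OP_plus_uconst 8 appended
     DW_OP_deref,      offset -8  =>  ... a constant 8 and DW_OP_minus
                                      appended  */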
1575
1576 /* Return a pointer to a newly allocated location description for
1577 REG and OFFSET. */
1578
1579 static inline dw_loc_descr_ref
1580 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1581 {
1582 HOST_WIDE_INT const_offset;
1583 if (offset.is_constant (&const_offset))
1584 {
1585 if (reg <= 31)
1586 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1587 const_offset, 0);
1588 else
1589 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1590 }
1591 else
1592 {
1593 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1594 loc_descr_plus_const (&ret, offset);
1595 return ret;
1596 }
1597 }
1598
1599 /* Add a constant OFFSET to a location list. */
1600
1601 static void
1602 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1603 {
1604 dw_loc_list_ref d;
1605 for (d = list_head; d != NULL; d = d->dw_loc_next)
1606 loc_descr_plus_const (&d->expr, offset);
1607 }
1608
1609 #define DWARF_REF_SIZE \
1610 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1611
1612 /* The number of bits that can be encoded by largest DW_FORM_dataN.
1613 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1614 DW_FORM_data16 with 128 bits. */
1615 #define DWARF_LARGEST_DATA_FORM_BITS \
1616 (dwarf_version >= 5 ? 128 : 64)
1617
1618 /* Utility inline function for construction of ops that were GNU extension
1619 before DWARF 5. */
1620 static inline enum dwarf_location_atom
1621 dwarf_OP (enum dwarf_location_atom op)
1622 {
1623 switch (op)
1624 {
1625 case DW_OP_implicit_pointer:
1626 if (dwarf_version < 5)
1627 return DW_OP_GNU_implicit_pointer;
1628 break;
1629
1630 case DW_OP_entry_value:
1631 if (dwarf_version < 5)
1632 return DW_OP_GNU_entry_value;
1633 break;
1634
1635 case DW_OP_const_type:
1636 if (dwarf_version < 5)
1637 return DW_OP_GNU_const_type;
1638 break;
1639
1640 case DW_OP_regval_type:
1641 if (dwarf_version < 5)
1642 return DW_OP_GNU_regval_type;
1643 break;
1644
1645 case DW_OP_deref_type:
1646 if (dwarf_version < 5)
1647 return DW_OP_GNU_deref_type;
1648 break;
1649
1650 case DW_OP_convert:
1651 if (dwarf_version < 5)
1652 return DW_OP_GNU_convert;
1653 break;
1654
1655 case DW_OP_reinterpret:
1656 if (dwarf_version < 5)
1657 return DW_OP_GNU_reinterpret;
1658 break;
1659
1660 case DW_OP_addrx:
1661 if (dwarf_version < 5)
1662 return DW_OP_GNU_addr_index;
1663 break;
1664
1665 case DW_OP_constx:
1666 if (dwarf_version < 5)
1667 return DW_OP_GNU_const_index;
1668 break;
1669
1670 default:
1671 break;
1672 }
1673 return op;
1674 }
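
/* A hypothetical caller building an address-index operand would write
   something like

     add_loc_descr (&loc, new_loc_descr (dwarf_OP (DW_OP_addrx), 0, 0));

   which yields the standard DW_OP_addrx opcode for -gdwarf-5 and falls back
   to the DW_OP_GNU_addr_index extension for earlier versions (sketch only;
   the real callers also set up the address table entry).  */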
1675
1676 /* Similarly for attributes. */
1677 static inline enum dwarf_attribute
1678 dwarf_AT (enum dwarf_attribute at)
1679 {
1680 switch (at)
1681 {
1682 case DW_AT_call_return_pc:
1683 if (dwarf_version < 5)
1684 return DW_AT_low_pc;
1685 break;
1686
1687 case DW_AT_call_tail_call:
1688 if (dwarf_version < 5)
1689 return DW_AT_GNU_tail_call;
1690 break;
1691
1692 case DW_AT_call_origin:
1693 if (dwarf_version < 5)
1694 return DW_AT_abstract_origin;
1695 break;
1696
1697 case DW_AT_call_target:
1698 if (dwarf_version < 5)
1699 return DW_AT_GNU_call_site_target;
1700 break;
1701
1702 case DW_AT_call_target_clobbered:
1703 if (dwarf_version < 5)
1704 return DW_AT_GNU_call_site_target_clobbered;
1705 break;
1706
1707 case DW_AT_call_parameter:
1708 if (dwarf_version < 5)
1709 return DW_AT_abstract_origin;
1710 break;
1711
1712 case DW_AT_call_value:
1713 if (dwarf_version < 5)
1714 return DW_AT_GNU_call_site_value;
1715 break;
1716
1717 case DW_AT_call_data_value:
1718 if (dwarf_version < 5)
1719 return DW_AT_GNU_call_site_data_value;
1720 break;
1721
1722 case DW_AT_call_all_calls:
1723 if (dwarf_version < 5)
1724 return DW_AT_GNU_all_call_sites;
1725 break;
1726
1727 case DW_AT_call_all_tail_calls:
1728 if (dwarf_version < 5)
1729 return DW_AT_GNU_all_tail_call_sites;
1730 break;
1731
1732 case DW_AT_dwo_name:
1733 if (dwarf_version < 5)
1734 return DW_AT_GNU_dwo_name;
1735 break;
1736
1737 case DW_AT_addr_base:
1738 if (dwarf_version < 5)
1739 return DW_AT_GNU_addr_base;
1740 break;
1741
1742 default:
1743 break;
1744 }
1745 return at;
1746 }
1747
1748 /* And similarly for tags. */
1749 static inline enum dwarf_tag
1750 dwarf_TAG (enum dwarf_tag tag)
1751 {
1752 switch (tag)
1753 {
1754 case DW_TAG_call_site:
1755 if (dwarf_version < 5)
1756 return DW_TAG_GNU_call_site;
1757 break;
1758
1759 case DW_TAG_call_site_parameter:
1760 if (dwarf_version < 5)
1761 return DW_TAG_GNU_call_site_parameter;
1762 break;
1763
1764 default:
1765 break;
1766 }
1767 return tag;
1768 }
1769
1770 static unsigned long int get_base_type_offset (dw_die_ref);
1771
1772 /* Return the size of a location descriptor. */
1773
1774 static unsigned long
1775 size_of_loc_descr (dw_loc_descr_ref loc)
1776 {
1777 unsigned long size = 1;
1778
1779 switch (loc->dw_loc_opc)
1780 {
1781 case DW_OP_addr:
1782 size += DWARF2_ADDR_SIZE;
1783 break;
1784 case DW_OP_GNU_addr_index:
1785 case DW_OP_addrx:
1786 case DW_OP_GNU_const_index:
1787 case DW_OP_constx:
1788 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1789 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1790 break;
1791 case DW_OP_const1u:
1792 case DW_OP_const1s:
1793 size += 1;
1794 break;
1795 case DW_OP_const2u:
1796 case DW_OP_const2s:
1797 size += 2;
1798 break;
1799 case DW_OP_const4u:
1800 case DW_OP_const4s:
1801 size += 4;
1802 break;
1803 case DW_OP_const8u:
1804 case DW_OP_const8s:
1805 size += 8;
1806 break;
1807 case DW_OP_constu:
1808 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1809 break;
1810 case DW_OP_consts:
1811 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1812 break;
1813 case DW_OP_pick:
1814 size += 1;
1815 break;
1816 case DW_OP_plus_uconst:
1817 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1818 break;
1819 case DW_OP_skip:
1820 case DW_OP_bra:
1821 size += 2;
1822 break;
1823 case DW_OP_breg0:
1824 case DW_OP_breg1:
1825 case DW_OP_breg2:
1826 case DW_OP_breg3:
1827 case DW_OP_breg4:
1828 case DW_OP_breg5:
1829 case DW_OP_breg6:
1830 case DW_OP_breg7:
1831 case DW_OP_breg8:
1832 case DW_OP_breg9:
1833 case DW_OP_breg10:
1834 case DW_OP_breg11:
1835 case DW_OP_breg12:
1836 case DW_OP_breg13:
1837 case DW_OP_breg14:
1838 case DW_OP_breg15:
1839 case DW_OP_breg16:
1840 case DW_OP_breg17:
1841 case DW_OP_breg18:
1842 case DW_OP_breg19:
1843 case DW_OP_breg20:
1844 case DW_OP_breg21:
1845 case DW_OP_breg22:
1846 case DW_OP_breg23:
1847 case DW_OP_breg24:
1848 case DW_OP_breg25:
1849 case DW_OP_breg26:
1850 case DW_OP_breg27:
1851 case DW_OP_breg28:
1852 case DW_OP_breg29:
1853 case DW_OP_breg30:
1854 case DW_OP_breg31:
1855 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1856 break;
1857 case DW_OP_regx:
1858 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1859 break;
1860 case DW_OP_fbreg:
1861 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1862 break;
1863 case DW_OP_bregx:
1864 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1865 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1866 break;
1867 case DW_OP_piece:
1868 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1869 break;
1870 case DW_OP_bit_piece:
1871 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1872 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1873 break;
1874 case DW_OP_deref_size:
1875 case DW_OP_xderef_size:
1876 size += 1;
1877 break;
1878 case DW_OP_call2:
1879 size += 2;
1880 break;
1881 case DW_OP_call4:
1882 size += 4;
1883 break;
1884 case DW_OP_call_ref:
1885 case DW_OP_GNU_variable_value:
1886 size += DWARF_REF_SIZE;
1887 break;
1888 case DW_OP_implicit_value:
1889 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1890 + loc->dw_loc_oprnd1.v.val_unsigned;
1891 break;
1892 case DW_OP_implicit_pointer:
1893 case DW_OP_GNU_implicit_pointer:
1894 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1895 break;
1896 case DW_OP_entry_value:
1897 case DW_OP_GNU_entry_value:
1898 {
1899 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1900 size += size_of_uleb128 (op_size) + op_size;
1901 break;
1902 }
1903 case DW_OP_const_type:
1904 case DW_OP_GNU_const_type:
1905 {
1906 unsigned long o
1907 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1908 size += size_of_uleb128 (o) + 1;
1909 switch (loc->dw_loc_oprnd2.val_class)
1910 {
1911 case dw_val_class_vec:
1912 size += loc->dw_loc_oprnd2.v.val_vec.length
1913 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1914 break;
1915 case dw_val_class_const:
1916 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1917 break;
1918 case dw_val_class_const_double:
1919 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1920 break;
1921 case dw_val_class_wide_int:
1922 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1923 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1924 break;
1925 default:
1926 gcc_unreachable ();
1927 }
1928 break;
1929 }
1930 case DW_OP_regval_type:
1931 case DW_OP_GNU_regval_type:
1932 {
1933 unsigned long o
1934 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1935 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1936 + size_of_uleb128 (o);
1937 }
1938 break;
1939 case DW_OP_deref_type:
1940 case DW_OP_GNU_deref_type:
1941 {
1942 unsigned long o
1943 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1944 size += 1 + size_of_uleb128 (o);
1945 }
1946 break;
1947 case DW_OP_convert:
1948 case DW_OP_reinterpret:
1949 case DW_OP_GNU_convert:
1950 case DW_OP_GNU_reinterpret:
1951 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1952 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1953 else
1954 {
1955 unsigned long o
1956 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1957 size += size_of_uleb128 (o);
1958 }
1959 break;
1960 case DW_OP_GNU_parameter_ref:
1961 size += 4;
1962 break;
1963 default:
1964 break;
1965 }
1966
1967 return size;
1968 }
1969
1970 /* Return the size of a series of location descriptors. */
1971
1972 unsigned long
1973 size_of_locs (dw_loc_descr_ref loc)
1974 {
1975 dw_loc_descr_ref l;
1976 unsigned long size;
1977
1978 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1979 field, to avoid writing to a PCH file. */
1980 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1981 {
1982 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1983 break;
1984 size += size_of_loc_descr (l);
1985 }
1986 if (! l)
1987 return size;
1988
1989 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1990 {
1991 l->dw_loc_addr = size;
1992 size += size_of_loc_descr (l);
1993 }
1994
1995 return size;
1996 }
1997
1998 /* Return the size of the value in a DW_AT_discr_value attribute. */
1999
2000 static int
2001 size_of_discr_value (dw_discr_value *discr_value)
2002 {
2003 if (discr_value->pos)
2004 return size_of_uleb128 (discr_value->v.uval);
2005 else
2006 return size_of_sleb128 (discr_value->v.sval);
2007 }
2008
2009 /* Return the size of the value in a DW_AT_discr_list attribute. */
2010
2011 static int
2012 size_of_discr_list (dw_discr_list_ref discr_list)
2013 {
2014 int size = 0;
2015
2016 for (dw_discr_list_ref list = discr_list;
2017 list != NULL;
2018 list = list->dw_discr_next)
2019 {
2020 /* One byte for the discriminant value descriptor, and then one or two
2021 LEB128 numbers, depending on whether it's a single case label or a
2022 range label. */
2023 size += 1;
2024 size += size_of_discr_value (&list->dw_discr_lower_bound);
2025 if (list->dw_discr_range != 0)
2026 size += size_of_discr_value (&list->dw_discr_upper_bound);
2027 }
2028 return size;
2029 }
2030
2031 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2032 static void get_ref_die_offset_label (char *, dw_die_ref);
2033 static unsigned long int get_ref_die_offset (dw_die_ref);
2034
2035 /* Output location description stack opcode's operands (if any).
2036 The for_eh_or_skip parameter controls whether register numbers are
2037 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2038 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2039 info). This should be suppressed for the cases that have not been converted
2040 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2041
2042 static void
2043 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2044 {
2045 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2046 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2047
2048 switch (loc->dw_loc_opc)
2049 {
2050 #ifdef DWARF2_DEBUGGING_INFO
2051 case DW_OP_const2u:
2052 case DW_OP_const2s:
2053 dw2_asm_output_data (2, val1->v.val_int, NULL);
2054 break;
2055 case DW_OP_const4u:
2056 if (loc->dtprel)
2057 {
2058 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2059 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2060 val1->v.val_addr);
2061 fputc ('\n', asm_out_file);
2062 break;
2063 }
2064 /* FALLTHRU */
2065 case DW_OP_const4s:
2066 dw2_asm_output_data (4, val1->v.val_int, NULL);
2067 break;
2068 case DW_OP_const8u:
2069 if (loc->dtprel)
2070 {
2071 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2072 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2073 val1->v.val_addr);
2074 fputc ('\n', asm_out_file);
2075 break;
2076 }
2077 /* FALLTHRU */
2078 case DW_OP_const8s:
2079 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2080 dw2_asm_output_data (8, val1->v.val_int, NULL);
2081 break;
2082 case DW_OP_skip:
2083 case DW_OP_bra:
2084 {
2085 int offset;
2086
2087 gcc_assert (val1->val_class == dw_val_class_loc);
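	  /* The operand is a self-relative branch offset, counted from the
	     end of this operation: the 1-byte opcode plus the 2-byte
	     operand, hence the + 3.  */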
2088 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2089
2090 dw2_asm_output_data (2, offset, NULL);
2091 }
2092 break;
2093 case DW_OP_implicit_value:
2094 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2095 switch (val2->val_class)
2096 {
2097 case dw_val_class_const:
2098 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2099 break;
2100 case dw_val_class_vec:
2101 {
2102 unsigned int elt_size = val2->v.val_vec.elt_size;
2103 unsigned int len = val2->v.val_vec.length;
2104 unsigned int i;
2105 unsigned char *p;
2106
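	    /* Elements wider than a HOST_WIDE_INT are split into two
	       half-size words each so that extract_int can handle them.  */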
2107 if (elt_size > sizeof (HOST_WIDE_INT))
2108 {
2109 elt_size /= 2;
2110 len *= 2;
2111 }
2112 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2113 i < len;
2114 i++, p += elt_size)
2115 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2116 "fp or vector constant word %u", i);
2117 }
2118 break;
2119 case dw_val_class_const_double:
2120 {
2121 unsigned HOST_WIDE_INT first, second;
2122
2123 if (WORDS_BIG_ENDIAN)
2124 {
2125 first = val2->v.val_double.high;
2126 second = val2->v.val_double.low;
2127 }
2128 else
2129 {
2130 first = val2->v.val_double.low;
2131 second = val2->v.val_double.high;
2132 }
2133 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2134 first, NULL);
2135 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2136 second, NULL);
2137 }
2138 break;
2139 case dw_val_class_wide_int:
2140 {
2141 int i;
2142 int len = get_full_len (*val2->v.val_wide);
2143 if (WORDS_BIG_ENDIAN)
2144 for (i = len - 1; i >= 0; --i)
2145 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2146 val2->v.val_wide->elt (i), NULL);
2147 else
2148 for (i = 0; i < len; ++i)
2149 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2150 val2->v.val_wide->elt (i), NULL);
2151 }
2152 break;
2153 case dw_val_class_addr:
2154 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2155 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2156 break;
2157 default:
2158 gcc_unreachable ();
2159 }
2160 break;
2161 #else
2162 case DW_OP_const2u:
2163 case DW_OP_const2s:
2164 case DW_OP_const4u:
2165 case DW_OP_const4s:
2166 case DW_OP_const8u:
2167 case DW_OP_const8s:
2168 case DW_OP_skip:
2169 case DW_OP_bra:
2170 case DW_OP_implicit_value:
2171 /* We currently don't make any attempt to make sure these are
2172 aligned properly like we do for the main unwind info, so
2173 don't support emitting things larger than a byte if we're
2174 only doing unwinding. */
2175 gcc_unreachable ();
2176 #endif
2177 case DW_OP_const1u:
2178 case DW_OP_const1s:
2179 dw2_asm_output_data (1, val1->v.val_int, NULL);
2180 break;
2181 case DW_OP_constu:
2182 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2183 break;
2184 case DW_OP_consts:
2185 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2186 break;
2187 case DW_OP_pick:
2188 dw2_asm_output_data (1, val1->v.val_int, NULL);
2189 break;
2190 case DW_OP_plus_uconst:
2191 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2192 break;
2193 case DW_OP_breg0:
2194 case DW_OP_breg1:
2195 case DW_OP_breg2:
2196 case DW_OP_breg3:
2197 case DW_OP_breg4:
2198 case DW_OP_breg5:
2199 case DW_OP_breg6:
2200 case DW_OP_breg7:
2201 case DW_OP_breg8:
2202 case DW_OP_breg9:
2203 case DW_OP_breg10:
2204 case DW_OP_breg11:
2205 case DW_OP_breg12:
2206 case DW_OP_breg13:
2207 case DW_OP_breg14:
2208 case DW_OP_breg15:
2209 case DW_OP_breg16:
2210 case DW_OP_breg17:
2211 case DW_OP_breg18:
2212 case DW_OP_breg19:
2213 case DW_OP_breg20:
2214 case DW_OP_breg21:
2215 case DW_OP_breg22:
2216 case DW_OP_breg23:
2217 case DW_OP_breg24:
2218 case DW_OP_breg25:
2219 case DW_OP_breg26:
2220 case DW_OP_breg27:
2221 case DW_OP_breg28:
2222 case DW_OP_breg29:
2223 case DW_OP_breg30:
2224 case DW_OP_breg31:
2225 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2226 break;
2227 case DW_OP_regx:
2228 {
2229 unsigned r = val1->v.val_unsigned;
2230 if (for_eh_or_skip >= 0)
2231 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2232 gcc_assert (size_of_uleb128 (r)
2233 == size_of_uleb128 (val1->v.val_unsigned));
2234 dw2_asm_output_data_uleb128 (r, NULL);
2235 }
2236 break;
2237 case DW_OP_fbreg:
2238 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2239 break;
2240 case DW_OP_bregx:
2241 {
2242 unsigned r = val1->v.val_unsigned;
2243 if (for_eh_or_skip >= 0)
2244 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2245 gcc_assert (size_of_uleb128 (r)
2246 == size_of_uleb128 (val1->v.val_unsigned));
2247 dw2_asm_output_data_uleb128 (r, NULL);
2248 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2249 }
2250 break;
2251 case DW_OP_piece:
2252 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2253 break;
2254 case DW_OP_bit_piece:
2255 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2256 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2257 break;
2258 case DW_OP_deref_size:
2259 case DW_OP_xderef_size:
2260 dw2_asm_output_data (1, val1->v.val_int, NULL);
2261 break;
2262
2263 case DW_OP_addr:
2264 if (loc->dtprel)
2265 {
2266 if (targetm.asm_out.output_dwarf_dtprel)
2267 {
2268 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2269 DWARF2_ADDR_SIZE,
2270 val1->v.val_addr);
2271 fputc ('\n', asm_out_file);
2272 }
2273 else
2274 gcc_unreachable ();
2275 }
2276 else
2277 {
2278 #ifdef DWARF2_DEBUGGING_INFO
2279 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2280 #else
2281 gcc_unreachable ();
2282 #endif
2283 }
2284 break;
2285
2286 case DW_OP_GNU_addr_index:
2287 case DW_OP_addrx:
2288 case DW_OP_GNU_const_index:
2289 case DW_OP_constx:
2290 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2291 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2292 "(index into .debug_addr)");
2293 break;
2294
2295 case DW_OP_call2:
2296 case DW_OP_call4:
2297 {
2298 unsigned long die_offset
2299 = get_ref_die_offset (val1->v.val_die_ref.die);
2300 /* Make sure the offset has been computed and that we can encode it as
2301 an operand. */
2302 gcc_assert (die_offset > 0
2303 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2304 ? 0xffff
2305 : 0xffffffff));
2306 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2307 die_offset, NULL);
2308 }
2309 break;
2310
2311 case DW_OP_call_ref:
2312 case DW_OP_GNU_variable_value:
2313 {
2314 char label[MAX_ARTIFICIAL_LABEL_BYTES
2315 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2316 gcc_assert (val1->val_class == dw_val_class_die_ref);
2317 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2318 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2319 }
2320 break;
2321
2322 case DW_OP_implicit_pointer:
2323 case DW_OP_GNU_implicit_pointer:
2324 {
2325 char label[MAX_ARTIFICIAL_LABEL_BYTES
2326 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2327 gcc_assert (val1->val_class == dw_val_class_die_ref);
2328 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2329 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2330 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2331 }
2332 break;
2333
2334 case DW_OP_entry_value:
2335 case DW_OP_GNU_entry_value:
2336 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2337 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2338 break;
2339
2340 case DW_OP_const_type:
2341 case DW_OP_GNU_const_type:
2342 {
2343 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2344 gcc_assert (o);
2345 dw2_asm_output_data_uleb128 (o, NULL);
2346 switch (val2->val_class)
2347 {
2348 case dw_val_class_const:
2349 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2350 dw2_asm_output_data (1, l, NULL);
2351 dw2_asm_output_data (l, val2->v.val_int, NULL);
2352 break;
2353 case dw_val_class_vec:
2354 {
2355 unsigned int elt_size = val2->v.val_vec.elt_size;
2356 unsigned int len = val2->v.val_vec.length;
2357 unsigned int i;
2358 unsigned char *p;
2359
2360 l = len * elt_size;
2361 dw2_asm_output_data (1, l, NULL);
2362 if (elt_size > sizeof (HOST_WIDE_INT))
2363 {
2364 elt_size /= 2;
2365 len *= 2;
2366 }
2367 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2368 i < len;
2369 i++, p += elt_size)
2370 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2371 "fp or vector constant word %u", i);
2372 }
2373 break;
2374 case dw_val_class_const_double:
2375 {
2376 unsigned HOST_WIDE_INT first, second;
2377 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2378
2379 dw2_asm_output_data (1, 2 * l, NULL);
2380 if (WORDS_BIG_ENDIAN)
2381 {
2382 first = val2->v.val_double.high;
2383 second = val2->v.val_double.low;
2384 }
2385 else
2386 {
2387 first = val2->v.val_double.low;
2388 second = val2->v.val_double.high;
2389 }
2390 dw2_asm_output_data (l, first, NULL);
2391 dw2_asm_output_data (l, second, NULL);
2392 }
2393 break;
2394 case dw_val_class_wide_int:
2395 {
2396 int i;
2397 int len = get_full_len (*val2->v.val_wide);
2398 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2399
2400 dw2_asm_output_data (1, len * l, NULL);
2401 if (WORDS_BIG_ENDIAN)
2402 for (i = len - 1; i >= 0; --i)
2403 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2404 else
2405 for (i = 0; i < len; ++i)
2406 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2407 }
2408 break;
2409 default:
2410 gcc_unreachable ();
2411 }
2412 }
2413 break;
2414 case DW_OP_regval_type:
2415 case DW_OP_GNU_regval_type:
2416 {
2417 unsigned r = val1->v.val_unsigned;
2418 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2419 gcc_assert (o);
2420 if (for_eh_or_skip >= 0)
2421 {
2422 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2423 gcc_assert (size_of_uleb128 (r)
2424 == size_of_uleb128 (val1->v.val_unsigned));
2425 }
2426 dw2_asm_output_data_uleb128 (r, NULL);
2427 dw2_asm_output_data_uleb128 (o, NULL);
2428 }
2429 break;
2430 case DW_OP_deref_type:
2431 case DW_OP_GNU_deref_type:
2432 {
2433 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2434 gcc_assert (o);
2435 dw2_asm_output_data (1, val1->v.val_int, NULL);
2436 dw2_asm_output_data_uleb128 (o, NULL);
2437 }
2438 break;
2439 case DW_OP_convert:
2440 case DW_OP_reinterpret:
2441 case DW_OP_GNU_convert:
2442 case DW_OP_GNU_reinterpret:
2443 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2444 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2445 else
2446 {
2447 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2448 gcc_assert (o);
2449 dw2_asm_output_data_uleb128 (o, NULL);
2450 }
2451 break;
2452
2453 case DW_OP_GNU_parameter_ref:
2454 {
2455 unsigned long o;
2456 gcc_assert (val1->val_class == dw_val_class_die_ref);
2457 o = get_ref_die_offset (val1->v.val_die_ref.die);
2458 dw2_asm_output_data (4, o, NULL);
2459 }
2460 break;
2461
2462 default:
2463 /* Other codes have no operands. */
2464 break;
2465 }
2466 }
2467
2468 /* Output a sequence of location operations.
2469 The for_eh_or_skip parameter controls whether register numbers are
2470 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2471 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2472 info). This should be suppressed for the cases that have not been converted
2473 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2474
2475 void
2476 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2477 {
2478 for (; loc != NULL; loc = loc->dw_loc_next)
2479 {
2480 enum dwarf_location_atom opc = loc->dw_loc_opc;
2481 /* Output the opcode. */
2482 if (for_eh_or_skip >= 0
2483 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2484 {
2485 unsigned r = (opc - DW_OP_breg0);
2486 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2487 gcc_assert (r <= 31);
2488 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2489 }
2490 else if (for_eh_or_skip >= 0
2491 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2492 {
2493 unsigned r = (opc - DW_OP_reg0);
2494 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2495 gcc_assert (r <= 31);
2496 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2497 }
2498
2499 dw2_asm_output_data (1, opc,
2500 "%s", dwarf_stack_op_name (opc));
2501
2502 /* Output the operand(s) (if any). */
2503 output_loc_operands (loc, for_eh_or_skip);
2504 }
2505 }
2506
2507 /* Output location description stack opcode's operands (if any).
2508 The output is single bytes on a line, suitable for .cfi_escape. */
2509
2510 static void
2511 output_loc_operands_raw (dw_loc_descr_ref loc)
2512 {
2513 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2514 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2515
2516 switch (loc->dw_loc_opc)
2517 {
2518 case DW_OP_addr:
2519 case DW_OP_GNU_addr_index:
2520 case DW_OP_addrx:
2521 case DW_OP_GNU_const_index:
2522 case DW_OP_constx:
2523 case DW_OP_implicit_value:
2524 /* We cannot output addresses in .cfi_escape, only bytes. */
2525 gcc_unreachable ();
2526
2527 case DW_OP_const1u:
2528 case DW_OP_const1s:
2529 case DW_OP_pick:
2530 case DW_OP_deref_size:
2531 case DW_OP_xderef_size:
2532 fputc (',', asm_out_file);
2533 dw2_asm_output_data_raw (1, val1->v.val_int);
2534 break;
2535
2536 case DW_OP_const2u:
2537 case DW_OP_const2s:
2538 fputc (',', asm_out_file);
2539 dw2_asm_output_data_raw (2, val1->v.val_int);
2540 break;
2541
2542 case DW_OP_const4u:
2543 case DW_OP_const4s:
2544 fputc (',', asm_out_file);
2545 dw2_asm_output_data_raw (4, val1->v.val_int);
2546 break;
2547
2548 case DW_OP_const8u:
2549 case DW_OP_const8s:
2550 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2551 fputc (',', asm_out_file);
2552 dw2_asm_output_data_raw (8, val1->v.val_int);
2553 break;
2554
2555 case DW_OP_skip:
2556 case DW_OP_bra:
2557 {
2558 int offset;
2559
2560 gcc_assert (val1->val_class == dw_val_class_loc);
2561 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2562
2563 fputc (',', asm_out_file);
2564 dw2_asm_output_data_raw (2, offset);
2565 }
2566 break;
2567
2568 case DW_OP_regx:
2569 {
2570 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2571 gcc_assert (size_of_uleb128 (r)
2572 == size_of_uleb128 (val1->v.val_unsigned));
2573 fputc (',', asm_out_file);
2574 dw2_asm_output_data_uleb128_raw (r);
2575 }
2576 break;
2577
2578 case DW_OP_constu:
2579 case DW_OP_plus_uconst:
2580 case DW_OP_piece:
2581 fputc (',', asm_out_file);
2582 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2583 break;
2584
2585 case DW_OP_bit_piece:
2586 fputc (',', asm_out_file);
2587 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2588 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2589 break;
2590
2591 case DW_OP_consts:
2592 case DW_OP_breg0:
2593 case DW_OP_breg1:
2594 case DW_OP_breg2:
2595 case DW_OP_breg3:
2596 case DW_OP_breg4:
2597 case DW_OP_breg5:
2598 case DW_OP_breg6:
2599 case DW_OP_breg7:
2600 case DW_OP_breg8:
2601 case DW_OP_breg9:
2602 case DW_OP_breg10:
2603 case DW_OP_breg11:
2604 case DW_OP_breg12:
2605 case DW_OP_breg13:
2606 case DW_OP_breg14:
2607 case DW_OP_breg15:
2608 case DW_OP_breg16:
2609 case DW_OP_breg17:
2610 case DW_OP_breg18:
2611 case DW_OP_breg19:
2612 case DW_OP_breg20:
2613 case DW_OP_breg21:
2614 case DW_OP_breg22:
2615 case DW_OP_breg23:
2616 case DW_OP_breg24:
2617 case DW_OP_breg25:
2618 case DW_OP_breg26:
2619 case DW_OP_breg27:
2620 case DW_OP_breg28:
2621 case DW_OP_breg29:
2622 case DW_OP_breg30:
2623 case DW_OP_breg31:
2624 case DW_OP_fbreg:
2625 fputc (',', asm_out_file);
2626 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2627 break;
2628
2629 case DW_OP_bregx:
2630 {
2631 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2632 gcc_assert (size_of_uleb128 (r)
2633 == size_of_uleb128 (val1->v.val_unsigned));
2634 fputc (',', asm_out_file);
2635 dw2_asm_output_data_uleb128_raw (r);
2636 fputc (',', asm_out_file);
2637 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2638 }
2639 break;
2640
2641 case DW_OP_implicit_pointer:
2642 case DW_OP_entry_value:
2643 case DW_OP_const_type:
2644 case DW_OP_regval_type:
2645 case DW_OP_deref_type:
2646 case DW_OP_convert:
2647 case DW_OP_reinterpret:
2648 case DW_OP_GNU_implicit_pointer:
2649 case DW_OP_GNU_entry_value:
2650 case DW_OP_GNU_const_type:
2651 case DW_OP_GNU_regval_type:
2652 case DW_OP_GNU_deref_type:
2653 case DW_OP_GNU_convert:
2654 case DW_OP_GNU_reinterpret:
2655 case DW_OP_GNU_parameter_ref:
2656 gcc_unreachable ();
2657 break;
2658
2659 default:
2660 /* Other codes have no operands. */
2661 break;
2662 }
2663 }
2664
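/* Output a sequence of location operations in raw form, as comma-separated
   bytes suitable for .cfi_escape (see output_loc_operands_raw above).  */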
2665 void
2666 output_loc_sequence_raw (dw_loc_descr_ref loc)
2667 {
2668 while (1)
2669 {
2670 enum dwarf_location_atom opc = loc->dw_loc_opc;
2671 /* Output the opcode. */
2672 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2673 {
2674 unsigned r = (opc - DW_OP_breg0);
2675 r = DWARF2_FRAME_REG_OUT (r, 1);
2676 gcc_assert (r <= 31);
2677 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2678 }
2679 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2680 {
2681 unsigned r = (opc - DW_OP_reg0);
2682 r = DWARF2_FRAME_REG_OUT (r, 1);
2683 gcc_assert (r <= 31);
2684 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2685 }
2686 /* Output the opcode. */
2687 fprintf (asm_out_file, "%#x", opc);
2688 output_loc_operands_raw (loc);
2689
2690 if (!loc->dw_loc_next)
2691 break;
2692 loc = loc->dw_loc_next;
2693
2694 fputc (',', asm_out_file);
2695 }
2696 }
2697
2698 /* This function builds a dwarf location descriptor sequence from a
2699 dw_cfa_location, adding the given OFFSET to the result of the
2700 expression. */
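/* For example, when the CFA is indirect (its value saved at [reg6 + 16])
   and the combined offset is 8, the emitted sequence is roughly
     DW_OP_breg6 16; DW_OP_deref; DW_OP_plus_uconst 8
   (the register number is purely illustrative).  */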
2701
2702 struct dw_loc_descr_node *
2703 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2704 {
2705 struct dw_loc_descr_node *head, *tmp;
2706
2707 offset += cfa->offset;
2708
2709 if (cfa->indirect)
2710 {
2711 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2712 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2713 head->dw_loc_oprnd1.val_entry = NULL;
2714 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2715 add_loc_descr (&head, tmp);
2716 loc_descr_plus_const (&head, offset);
2717 }
2718 else
2719 head = new_reg_loc_descr (cfa->reg, offset);
2720
2721 return head;
2722 }
2723
2724 /* This function builds a dwarf location descriptor sequence for
2725    the address at OFFSET from the CFA when the stack is aligned to
2726    ALIGNMENT bytes.  */
2727
2728 struct dw_loc_descr_node *
2729 build_cfa_aligned_loc (dw_cfa_location *cfa,
2730 poly_int64 offset, HOST_WIDE_INT alignment)
2731 {
2732 struct dw_loc_descr_node *head;
2733 unsigned int dwarf_fp
2734 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2735
2736 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2737 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2738 {
2739 head = new_reg_loc_descr (dwarf_fp, 0);
2740 add_loc_descr (&head, int_loc_descriptor (alignment));
2741 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2742 loc_descr_plus_const (&head, offset);
2743 }
2744 else
2745 head = new_reg_loc_descr (dwarf_fp, offset);
2746 return head;
2747 }
2748 \f
2749 /* And now, the support for symbolic debugging information. */
2750
2751 /* .debug_str support. */
2752
2753 static void dwarf2out_init (const char *);
2754 static void dwarf2out_finish (const char *);
2755 static void dwarf2out_early_finish (const char *);
2756 static void dwarf2out_assembly_start (void);
2757 static void dwarf2out_define (unsigned int, const char *);
2758 static void dwarf2out_undef (unsigned int, const char *);
2759 static void dwarf2out_start_source_file (unsigned, const char *);
2760 static void dwarf2out_end_source_file (unsigned);
2761 static void dwarf2out_function_decl (tree);
2762 static void dwarf2out_begin_block (unsigned, unsigned);
2763 static void dwarf2out_end_block (unsigned, unsigned);
2764 static bool dwarf2out_ignore_block (const_tree);
2765 static void dwarf2out_early_global_decl (tree);
2766 static void dwarf2out_late_global_decl (tree);
2767 static void dwarf2out_type_decl (tree, int);
2768 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2769 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2770 dw_die_ref);
2771 static void dwarf2out_abstract_function (tree);
2772 static void dwarf2out_var_location (rtx_insn *);
2773 static void dwarf2out_inline_entry (tree);
2774 static void dwarf2out_size_function (tree);
2775 static void dwarf2out_begin_function (tree);
2776 static void dwarf2out_end_function (unsigned int);
2777 static void dwarf2out_register_main_translation_unit (tree unit);
2778 static void dwarf2out_set_name (tree, tree);
2779 static void dwarf2out_register_external_die (tree decl, const char *sym,
2780 unsigned HOST_WIDE_INT off);
2781 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2782 unsigned HOST_WIDE_INT *off);
2783
2784 /* The debug hooks structure. */
2785
2786 const struct gcc_debug_hooks dwarf2_debug_hooks =
2787 {
2788 dwarf2out_init,
2789 dwarf2out_finish,
2790 dwarf2out_early_finish,
2791 dwarf2out_assembly_start,
2792 dwarf2out_define,
2793 dwarf2out_undef,
2794 dwarf2out_start_source_file,
2795 dwarf2out_end_source_file,
2796 dwarf2out_begin_block,
2797 dwarf2out_end_block,
2798 dwarf2out_ignore_block,
2799 dwarf2out_source_line,
2800 dwarf2out_begin_prologue,
2801 #if VMS_DEBUGGING_INFO
2802 dwarf2out_vms_end_prologue,
2803 dwarf2out_vms_begin_epilogue,
2804 #else
2805 debug_nothing_int_charstar,
2806 debug_nothing_int_charstar,
2807 #endif
2808 dwarf2out_end_epilogue,
2809 dwarf2out_begin_function,
2810 dwarf2out_end_function, /* end_function */
2811 dwarf2out_register_main_translation_unit,
2812 dwarf2out_function_decl, /* function_decl */
2813 dwarf2out_early_global_decl,
2814 dwarf2out_late_global_decl,
2815 dwarf2out_type_decl, /* type_decl */
2816 dwarf2out_imported_module_or_decl,
2817 dwarf2out_die_ref_for_decl,
2818 dwarf2out_register_external_die,
2819 debug_nothing_tree, /* deferred_inline_function */
2820 /* The DWARF 2 backend tries to reduce debugging bloat by not
2821 emitting the abstract description of inline functions until
2822 something tries to reference them. */
2823 dwarf2out_abstract_function, /* outlining_inline_function */
2824 debug_nothing_rtx_code_label, /* label */
2825 debug_nothing_int, /* handle_pch */
2826 dwarf2out_var_location,
2827 dwarf2out_inline_entry, /* inline_entry */
2828 dwarf2out_size_function, /* size_function */
2829 dwarf2out_switch_text_section,
2830 dwarf2out_set_name,
2831 1, /* start_end_main_source_file */
2832 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2833 };
2834
2835 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2836 {
2837 dwarf2out_init,
2838 debug_nothing_charstar,
2839 debug_nothing_charstar,
2840 dwarf2out_assembly_start,
2841 debug_nothing_int_charstar,
2842 debug_nothing_int_charstar,
2843 debug_nothing_int_charstar,
2844 debug_nothing_int,
2845 debug_nothing_int_int, /* begin_block */
2846 debug_nothing_int_int, /* end_block */
2847 debug_true_const_tree, /* ignore_block */
2848 dwarf2out_source_line, /* source_line */
2849 debug_nothing_int_int_charstar, /* begin_prologue */
2850 debug_nothing_int_charstar, /* end_prologue */
2851 debug_nothing_int_charstar, /* begin_epilogue */
2852 debug_nothing_int_charstar, /* end_epilogue */
2853 debug_nothing_tree, /* begin_function */
2854 debug_nothing_int, /* end_function */
2855 debug_nothing_tree, /* register_main_translation_unit */
2856 debug_nothing_tree, /* function_decl */
2857 debug_nothing_tree, /* early_global_decl */
2858 debug_nothing_tree, /* late_global_decl */
2859 debug_nothing_tree_int, /* type_decl */
2860 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2861 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2862 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2863 debug_nothing_tree, /* deferred_inline_function */
2864 debug_nothing_tree, /* outlining_inline_function */
2865 debug_nothing_rtx_code_label, /* label */
2866 debug_nothing_int, /* handle_pch */
2867 debug_nothing_rtx_insn, /* var_location */
2868 debug_nothing_tree, /* inline_entry */
2869 debug_nothing_tree, /* size_function */
2870 debug_nothing_void, /* switch_text_section */
2871 debug_nothing_tree_tree, /* set_name */
2872 0, /* start_end_main_source_file */
2873 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2874 };
2875 \f
2876 /* NOTE: In the comments in this file, many references are made to
2877 "Debugging Information Entries". This term is abbreviated as `DIE'
2878 throughout the remainder of this file. */
2879
2880 /* An internal representation of the DWARF output is built, and then
2881 walked to generate the DWARF debugging info. The walk of the internal
2882 representation is done after the entire program has been compiled.
2883 The types below are used to describe the internal representation. */
2884
2885 /* Whether to put type DIEs into their own section .debug_types instead
2886 of making them part of the .debug_info section. Only supported for
2887    Dwarf V4 or higher, and only if the user didn't disable them through
2888    -fno-debug-types-section.  It is more efficient to put them in
2889    separate comdat sections since the linker will then be able to
2890    remove duplicates.  But not all tools support .debug_types sections
2891    yet.  For Dwarf V5 or higher, .debug_types doesn't exist any more;
2892    the types are emitted as DW_UT_type units in the .debug_info section.  */
2893
2894 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2895
2896 /* Various DIE's use offsets relative to the beginning of the
2897 .debug_info section to refer to each other. */
2898
2899 typedef long int dw_offset;
2900
2901 struct comdat_type_node;
2902
2903 /* The entries in the line_info table more-or-less mirror the opcodes
2904 that are used in the real dwarf line table. Arrays of these entries
2905 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2906 supported. */
2907
2908 enum dw_line_info_opcode {
2909 /* Emit DW_LNE_set_address; the operand is the label index. */
2910 LI_set_address,
2911
2912 /* Emit a row to the matrix with the given line. This may be done
2913 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2914 special opcodes. */
2915 LI_set_line,
2916
2917 /* Emit a DW_LNS_set_file. */
2918 LI_set_file,
2919
2920 /* Emit a DW_LNS_set_column. */
2921 LI_set_column,
2922
2923 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2924 LI_negate_stmt,
2925
2926 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2927 LI_set_prologue_end,
2928 LI_set_epilogue_begin,
2929
2930 /* Emit a DW_LNE_set_discriminator. */
2931 LI_set_discriminator,
2932
2933 /* Output a Fixed Advance PC; the target PC is the label index; the
2934 base PC is the previous LI_adv_address or LI_set_address entry.
2935 We only use this when emitting debug views without assembler
2936 support, at explicit user request. Ideally, we should only use
2937 it when the offset might be zero but we can't tell: it's the only
2938 way to maybe change the PC without resetting the view number. */
2939 LI_adv_address
2940 };
2941
2942 typedef struct GTY(()) dw_line_info_struct {
2943 enum dw_line_info_opcode opcode;
2944 unsigned int val;
2945 } dw_line_info_entry;
2946
2947
2948 struct GTY(()) dw_line_info_table {
2949 /* The label that marks the end of this section. */
2950 const char *end_label;
2951
2952 /* The values for the last row of the matrix, as collected in the table.
2953 These are used to minimize the changes to the next row. */
2954 unsigned int file_num;
2955 unsigned int line_num;
2956 unsigned int column_num;
2957 int discrim_num;
2958 bool is_stmt;
2959 bool in_use;
2960
2961 /* This denotes the NEXT view number.
2962
2963 If it is 0, it is known that the NEXT view will be the first view
2964 at the given PC.
2965
2966 If it is -1, we're forcing the view number to be reset, e.g. at a
2967 function entry.
2968
2969 The meaning of other nonzero values depends on whether we're
2970 computing views internally or leaving it for the assembler to do
2971 so. If we're emitting them internally, view denotes the view
2972 number since the last known advance of PC. If we're leaving it
2973 for the assembler, it denotes the LVU label number that we're
2974 going to ask the assembler to assign. */
2975 var_loc_view view;
2976
2977 /* This counts the number of symbolic views emitted in this table
2978 since the latest view reset. Its max value, over all tables,
2979 sets symview_upper_bound. */
2980 var_loc_view symviews_since_reset;
2981
2982 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
2983 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
2984 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
2985 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
2986
2987 vec<dw_line_info_entry, va_gc> *entries;
2988 };
2989
2990 /* This is an upper bound for view numbers that the assembler may
2991    assign to symbolic views output in this translation unit.  It is used to
2992 decide how big a field to use to represent view numbers in
2993 symview-classed attributes. */
2994
2995 static var_loc_view symview_upper_bound;
2996
2997 /* If we're keeping track of location views and their reset points, and
2998 INSN is a reset point (i.e., it necessarily advances the PC), mark
2999 the next view in TABLE as reset. */
3000
3001 static void
3002 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3003 {
3004 if (!debug_internal_reset_location_views)
3005 return;
3006
3007 /* Maybe turn (part of?) this test into a default target hook. */
3008 int reset = 0;
3009
3010 if (targetm.reset_location_view)
3011 reset = targetm.reset_location_view (insn);
3012
3013 if (reset)
3014 ;
3015 else if (JUMP_TABLE_DATA_P (insn))
3016 reset = 1;
3017 else if (GET_CODE (insn) == USE
3018 || GET_CODE (insn) == CLOBBER
3019 || GET_CODE (insn) == ASM_INPUT
3020 || asm_noperands (insn) >= 0)
3021 ;
3022 else if (get_attr_min_length (insn) > 0)
3023 reset = 1;
3024
3025 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3026 RESET_NEXT_VIEW (table->view);
3027 }
3028
3029 /* Each DIE attribute has a field specifying the attribute kind,
3030 a link to the next attribute in the chain, and an attribute value.
3031 Attributes are typically linked below the DIE they modify. */
3032
3033 typedef struct GTY(()) dw_attr_struct {
3034 enum dwarf_attribute dw_attr;
3035 dw_val_node dw_attr_val;
3036 }
3037 dw_attr_node;
3038
3039
3040 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3041 The children of each node form a circular list linked by
3042 die_sib. die_child points to the node *before* the "first" child node. */
3043
3044 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3045 union die_symbol_or_type_node
3046 {
3047 const char * GTY ((tag ("0"))) die_symbol;
3048 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3049 }
3050 GTY ((desc ("%0.comdat_type_p"))) die_id;
3051 vec<dw_attr_node, va_gc> *die_attr;
3052 dw_die_ref die_parent;
3053 dw_die_ref die_child;
3054 dw_die_ref die_sib;
3055 dw_die_ref die_definition; /* ref from a specification to its definition */
3056 dw_offset die_offset;
3057 unsigned long die_abbrev;
3058 int die_mark;
3059 unsigned int decl_id;
3060 enum dwarf_tag die_tag;
3061 /* Die is used and must not be pruned as unused. */
3062 BOOL_BITFIELD die_perennial_p : 1;
3063 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3064   /* Set for an external ref to die_symbol when die_offset contains an
3065      extra offset to add to that symbol.  */
3066 BOOL_BITFIELD with_offset : 1;
3067 /* Whether this DIE was removed from the DIE tree, for example via
3068      prune_unused_types.  The DIE lookup routines do not consider
3069      such DIEs to be present.  */
3070 BOOL_BITFIELD removed : 1;
3071 /* Lots of spare bits. */
3072 }
3073 die_node;
3074
3075 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3076 static bool early_dwarf;
3077 static bool early_dwarf_finished;
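/* RAII helper: constructing a set_early_dwarf object turns early_dwarf on
   for the enclosing scope and restores the previous value on destruction.  */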
3078 struct set_early_dwarf {
3079 bool saved;
3080 set_early_dwarf () : saved(early_dwarf)
3081 {
3082 gcc_assert (! early_dwarf_finished);
3083 early_dwarf = true;
3084 }
3085 ~set_early_dwarf () { early_dwarf = saved; }
3086 };
3087
3088 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3089 #define FOR_EACH_CHILD(die, c, expr) do { \
3090 c = die->die_child; \
3091 if (c) do { \
3092 c = c->die_sib; \
3093 expr; \
3094 } while (c != die->die_child); \
3095 } while (0)
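/* Typical use (the visit function is illustrative only):
     dw_die_ref c;
     FOR_EACH_CHILD (die, c, visit (c));  */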
3096
3097 /* The pubname structure */
3098
3099 typedef struct GTY(()) pubname_struct {
3100 dw_die_ref die;
3101 const char *name;
3102 }
3103 pubname_entry;
3104
3105
3106 struct GTY(()) dw_ranges {
3107 const char *label;
3108 /* If this is positive, it's a block number, otherwise it's a
3109 bitwise-negated index into dw_ranges_by_label. */
3110 int num;
3111 /* Index for the range list for DW_FORM_rnglistx. */
3112 unsigned int idx : 31;
3113   /* True if this range might be in a different section
3114      from the previous entry.  */
3115 unsigned int maybe_new_sec : 1;
3116 };
3117
3118 /* A structure to hold a macinfo entry. */
3119
3120 typedef struct GTY(()) macinfo_struct {
3121 unsigned char code;
3122 unsigned HOST_WIDE_INT lineno;
3123 const char *info;
3124 }
3125 macinfo_entry;
3126
3127
3128 struct GTY(()) dw_ranges_by_label {
3129 const char *begin;
3130 const char *end;
3131 };
3132
3133 /* The comdat type node structure. */
3134 struct GTY(()) comdat_type_node
3135 {
3136 dw_die_ref root_die;
3137 dw_die_ref type_die;
3138 dw_die_ref skeleton_die;
3139 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3140 comdat_type_node *next;
3141 };
3142
3143 /* A list of DIEs for which we can't determine ancestry (parent_die
3144 field) just yet. Later in dwarf2out_finish we will fill in the
3145 missing bits. */
3146 typedef struct GTY(()) limbo_die_struct {
3147 dw_die_ref die;
3148 /* The tree for which this DIE was created. We use this to
3149 determine ancestry later. */
3150 tree created_for;
3151 struct limbo_die_struct *next;
3152 }
3153 limbo_die_node;
3154
3155 typedef struct skeleton_chain_struct
3156 {
3157 dw_die_ref old_die;
3158 dw_die_ref new_die;
3159 struct skeleton_chain_struct *parent;
3160 }
3161 skeleton_chain_node;
3162
3163 /* Define a macro which returns nonzero for a TYPE_DECL which was
3164 implicitly generated for a type.
3165
3166 Note that, unlike the C front-end (which generates a NULL named
3167 TYPE_DECL node for each complete tagged type, each array type,
3168 and each function type node created) the C++ front-end generates
3169 a _named_ TYPE_DECL node for each tagged type node created.
3170 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3171 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3172 front-end, but for each type, tagged or not. */
3173
3174 #define TYPE_DECL_IS_STUB(decl) \
3175 (DECL_NAME (decl) == NULL_TREE \
3176 || (DECL_ARTIFICIAL (decl) \
3177 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3178 /* This is necessary for stub decls that \
3179 appear in nested inline functions. */ \
3180 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3181 && (decl_ultimate_origin (decl) \
3182 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3183
3184 /* Information concerning the compilation unit's programming
3185 language, and compiler version. */
3186
3187 /* Fixed size portion of the DWARF compilation unit header. */
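/* That is the initial length, the section offset of the abbreviation table,
   the 2-byte version number and the 1-byte address size, plus a 1-byte unit
   type for DWARF 5.  */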
3188 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3189 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3190 + (dwarf_version >= 5 ? 4 : 3))
3191
3192 /* Fixed size portion of the DWARF comdat type unit header. */
3193 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3194 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3195 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3196
3197 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3198 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3199 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3200
3201 /* Fixed size portion of public names info. */
3202 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3203
3204 /* Fixed size portion of the address range info. */
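/* The extra 4 bytes are the 2-byte version, the 1-byte address size and the
   1-byte segment selector size; the header is rounded up so that the
   address/length pairs which follow are aligned to twice the address size.  */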
3205 #define DWARF_ARANGES_HEADER_SIZE \
3206 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3207 DWARF2_ADDR_SIZE * 2) \
3208 - DWARF_INITIAL_LENGTH_SIZE)
3209
3210 /* Size of padding portion in the address range info. It must be
3211 aligned to twice the pointer size. */
3212 #define DWARF_ARANGES_PAD_SIZE \
3213 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3214 DWARF2_ADDR_SIZE * 2) \
3215 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
3216
3217 /* Use assembler line directives if available. */
3218 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3219 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3220 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3221 #else
3222 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3223 #endif
3224 #endif
3225
3226 /* Use assembler views in line directives if available. */
3227 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3228 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3229 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3230 #else
3231 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3232 #endif
3233 #endif
3234
3235 /* Return true if GCC configure detected assembler support for .loc. */
3236
3237 bool
3238 dwarf2out_default_as_loc_support (void)
3239 {
3240 return DWARF2_ASM_LINE_DEBUG_INFO;
3241 #if (GCC_VERSION >= 3000)
3242 # undef DWARF2_ASM_LINE_DEBUG_INFO
3243 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3244 #endif
3245 }
3246
3247 /* Return true if GCC configure detected assembler support for views
3248 in .loc directives. */
3249
3250 bool
3251 dwarf2out_default_as_locview_support (void)
3252 {
3253 return DWARF2_ASM_VIEW_DEBUG_INFO;
3254 #if (GCC_VERSION >= 3000)
3255 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3256 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3257 #endif
3258 }
3259
3260 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3261 view computation, and it refers to a view identifier for which we
3262 will not emit a label because it is known to map to a view number
3263 zero. We won't allocate the bitmap if we're not using assembler
3264 support for location views, but we have to make the variable
3265 visible for GGC and for code that will be optimized out for lack of
3266 support but that's still parsed and compiled. We could abstract it
3267 out with macros, but it's not worth it. */
3268 static GTY(()) bitmap zero_view_p;
3269
3270 /* Evaluate to TRUE iff N is known to identify the first location view
3271 at its PC. When not using assembler location view computation,
3272 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3273 and views label numbers recorded in it are the ones known to be
3274 zero. */
3275 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3276 || (N) == (var_loc_view)-1 \
3277 || (zero_view_p \
3278 && bitmap_bit_p (zero_view_p, (N))))
3279
3280 /* Return true iff we're to emit .loc directives for the assembler to
3281 generate line number sections.
3282
3283 When we're not emitting views, all we need from the assembler is
3284 support for .loc directives.
3285
3286 If we are emitting views, we can only use the assembler's .loc
3287 support if it also supports views.
3288
3289 When the compiler is emitting the line number programs and
3290 computing view numbers itself, it resets view numbers at known PC
3291 changes and counts from that, and then it emits view numbers as
3292 literal constants in locviewlists. There are cases in which the
3293 compiler is not sure about PC changes, e.g. when extra alignment is
3294 requested for a label. In these cases, the compiler may not reset
3295 the view counter, and the potential PC advance in the line number
3296 program will use an opcode that does not reset the view counter
3297 even if the PC actually changes, so that compiler and debug info
3298 consumer can keep view numbers in sync.
3299
3300 When the compiler defers view computation to the assembler, it
3301 emits symbolic view numbers in locviewlists, with the exception of
3302 views known to be zero (forced resets, or reset after
3303 compiler-visible PC changes): instead of emitting symbols for
3304 these, we emit literal zero and assert the assembler agrees with
3305 the compiler's assessment. We could use symbolic views everywhere,
3306 instead of special-casing zero views, but then we'd be unable to
3307 optimize out locviewlists that contain only zeros. */
3308
3309 static bool
3310 output_asm_line_debug_info (void)
3311 {
3312 return (dwarf2out_as_loc_support
3313 && (dwarf2out_as_locview_support
3314 || !debug_variable_location_views));
3315 }
3316
3317 /* Minimum line offset in a special line info. opcode.
3318 This value was chosen to give a reasonable range of values. */
3319 #define DWARF_LINE_BASE -10
3320
3321 /* First special line opcode - leave room for the standard opcodes. */
3322 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3323
3324 /* Range of line offsets in a special line info. opcode. */
3325 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
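/* With these values, the standard DWARF special opcode encoding is
     opcode = (line_delta - DWARF_LINE_BASE)
	      + (DWARF_LINE_RANGE * addr_delta) + DWARF_LINE_OPCODE_BASE
   which must fit in a single byte; advances that do not fit are emitted
   with the standard opcodes instead.  */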
3326
3327 /* Flag that indicates the initial value of the is_stmt_start flag.
3328 In the present implementation, we do not mark any lines as
3329 the beginning of a source statement, because that information
3330 is not made available by the GCC front-end. */
3331 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3332
3333 /* Maximum number of operations per instruction bundle. */
3334 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3335 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3336 #endif
3337
3338 /* This location is used by calc_die_sizes() to keep track of
3339    the offset of each DIE within the .debug_info section.  */
3340 static unsigned long next_die_offset;
3341
3342 /* Record the root of the DIE's built for the current compilation unit. */
3343 static GTY(()) dw_die_ref single_comp_unit_die;
3344
3345 /* A list of type DIEs that have been separated into comdat sections. */
3346 static GTY(()) comdat_type_node *comdat_type_list;
3347
3348 /* A list of CU DIEs that have been separated. */
3349 static GTY(()) limbo_die_node *cu_die_list;
3350
3351 /* A list of DIEs with a NULL parent waiting to be relocated. */
3352 static GTY(()) limbo_die_node *limbo_die_list;
3353
3354 /* A list of DIEs for which we may have to generate
3355 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3356 static GTY(()) limbo_die_node *deferred_asm_name;
3357
3358 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3359 {
3360 typedef const char *compare_type;
3361
3362 static hashval_t hash (dwarf_file_data *);
3363 static bool equal (dwarf_file_data *, const char *);
3364 };
3365
3366 /* Filenames referenced by this compilation unit. */
3367 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3368
3369 struct decl_die_hasher : ggc_ptr_hash<die_node>
3370 {
3371 typedef tree compare_type;
3372
3373 static hashval_t hash (die_node *);
3374 static bool equal (die_node *, tree);
3375 };
3376 /* A hash table of references to DIE's that describe declarations.
3377 The key is a DECL_UID() which is a unique number identifying each decl. */
3378 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3379
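/* An entry of variable_value_hash below: the DECL_UID of a FUNCTION_DECL
   together with the DIEs that refer to variables in its context via
   DW_OP_GNU_variable_value.  */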
3380 struct GTY ((for_user)) variable_value_struct {
3381 unsigned int decl_id;
3382 vec<dw_die_ref, va_gc> *dies;
3383 };
3384
3385 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3386 {
3387 typedef tree compare_type;
3388
3389 static hashval_t hash (variable_value_struct *);
3390 static bool equal (variable_value_struct *, tree);
3391 };
3392 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3393    dw_val_class_decl_ref class, indexed by the FUNCTION_DECLs that are the
3394 DECL_CONTEXT of the referenced VAR_DECLs. */
3395 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3396
3397 struct block_die_hasher : ggc_ptr_hash<die_struct>
3398 {
3399 static hashval_t hash (die_struct *);
3400 static bool equal (die_struct *, die_struct *);
3401 };
3402
3403 /* A hash table of references to DIE's that describe COMMON blocks.
3404 The key is DECL_UID() ^ die_parent. */
3405 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3406
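/* A DIE paired with a tree argument; used for the entries of
   tmpl_value_parm_die_table below.  */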
3407 typedef struct GTY(()) die_arg_entry_struct {
3408 dw_die_ref die;
3409 tree arg;
3410 } die_arg_entry;
3411
3412
3413 /* Node of the variable location list. */
3414 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3415 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3416 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3417 in mode of the EXPR_LIST node and first EXPR_LIST operand
3418 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3419 location or NULL for padding. For larger bitsizes,
3420 mode is 0 and first operand is a CONCAT with bitsize
3421 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3422 NULL as second operand. */
3423 rtx GTY (()) loc;
3424 const char * GTY (()) label;
3425 struct var_loc_node * GTY (()) next;
3426 var_loc_view view;
3427 };
3428
3429 /* Variable location list. */
3430 struct GTY ((for_user)) var_loc_list_def {
3431 struct var_loc_node * GTY (()) first;
3432
3433 /* Pointer to the last but one or last element of the
3434      chained list.  If the list is empty, both first and
3435      last are NULL.  If the list contains just one node,
3436      or the last node is certainly not redundant, it points
3437      to the last node; otherwise it points to the last but one.
3438 Do not mark it for GC because it is marked through the chain. */
3439 struct var_loc_node * GTY ((skip ("%h"))) last;
3440
3441   /* Pointer to the last element before a section switch;
3442 if NULL, either sections weren't switched or first
3443 is after section switch. */
3444 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3445
3446 /* DECL_UID of the variable decl. */
3447 unsigned int decl_id;
3448 };
3449 typedef struct var_loc_list_def var_loc_list;
3450
3451 /* Call argument location list. */
3452 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3453 rtx GTY (()) call_arg_loc_note;
3454 const char * GTY (()) label;
3455 tree GTY (()) block;
3456 bool tail_call_p;
3457 rtx GTY (()) symbol_ref;
3458 struct call_arg_loc_node * GTY (()) next;
3459 };
3460
3461
3462 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3463 {
3464 typedef const_tree compare_type;
3465
3466 static hashval_t hash (var_loc_list *);
3467 static bool equal (var_loc_list *, const_tree);
3468 };
3469
3470 /* Table of decl location linked lists. */
3471 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3472
3473 /* Head and tail of call_arg_loc chain. */
3474 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3475 static struct call_arg_loc_node *call_arg_loc_last;
3476
3477 /* Number of call sites in the current function. */
3478 static int call_site_count = -1;
3479 /* Number of tail call sites in the current function. */
3480 static int tail_call_site_count = -1;
3481
3482 /* A cached location list. */
3483 struct GTY ((for_user)) cached_dw_loc_list_def {
3484 /* The DECL_UID of the decl that this entry describes. */
3485 unsigned int decl_id;
3486
3487 /* The cached location list. */
3488 dw_loc_list_ref loc_list;
3489 };
3490 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3491
3492 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3493 {
3494
3495 typedef const_tree compare_type;
3496
3497 static hashval_t hash (cached_dw_loc_list *);
3498 static bool equal (cached_dw_loc_list *, const_tree);
3499 };
3500
3501 /* Table of cached location lists. */
3502 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3503
3504 /* A vector of references to DIE's that are uniquely identified by their tag,
3505 presence/absence of children DIE's, and list of attribute/value pairs. */
3506 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3507
3508 /* A hash map to remember the stack usage for DWARF procedures. The value
3509 stored is the stack size difference between before the DWARF procedure
3510    invocation and after it returned.  In other words, for a DWARF procedure
3511 that consumes N stack slots and that pushes M ones, this stores M - N. */
3512 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3513
3514 /* A global counter for generating labels for line number data. */
3515 static unsigned int line_info_label_num;
3516
3517 /* The current table to which we should emit line number information
3518 for the current function. This will be set up at the beginning of
3519 assembly for the function. */
3520 static GTY(()) dw_line_info_table *cur_line_info_table;
3521
3522 /* The two default tables of line number info. */
3523 static GTY(()) dw_line_info_table *text_section_line_info;
3524 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3525
3526 /* The set of all non-default tables of line number info. */
3527 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3528
3529 /* A flag to tell the pubnames/pubtypes export whether there is an info
3530 section to refer to. */
3531 static bool info_section_emitted;
3532
3533 /* A pointer to the base of a table that contains a list of publicly
3534 accessible names. */
3535 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3536
3537 /* A pointer to the base of a table that contains a list of publicly
3538 accessible types. */
3539 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3540
3541 /* A pointer to the base of a table that contains a list of macro
3542 defines/undefines (and file start/end markers). */
3543 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3544
3545 /* True if .debug_macinfo or .debug_macros section is going to be
3546 emitted. */
3547 #define have_macinfo \
3548 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3549 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3550 && !macinfo_table->is_empty ())
3551
3552 /* Vector of dies for which we should generate .debug_ranges info. */
3553 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3554
3555 /* Vector of pairs of labels referenced in ranges_table. */
3556 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3557
3558 /* Whether we have location lists that need outputting. */
3559 static GTY(()) bool have_location_lists;
3560
3561 /* Unique label counter. */
3562 static GTY(()) unsigned int loclabel_num;
3563
3564 /* Unique label counter for point-of-call tables. */
3565 static GTY(()) unsigned int poc_label_num;
3566
3567 /* The last file entry emitted by maybe_emit_file(). */
3568 static GTY(()) struct dwarf_file_data * last_emitted_file;
3569
3570 /* Number of internal labels generated by gen_internal_sym(). */
3571 static GTY(()) int label_num;
3572
3573 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3574
3575 /* Instances of generic types for which we need to generate debug
3576 info that describes their generic parameters and arguments. That
3577 generation needs to happen once all types are properly laid out so
3578 we do it at the end of compilation. */
3579 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3580
3581 /* Offset from the "steady-state frame pointer" to the frame base,
3582 within the current function. */
3583 static poly_int64 frame_pointer_fb_offset;
3584 static bool frame_pointer_fb_offset_valid;
3585
3586 static vec<dw_die_ref> base_types;
3587
3588 /* Flags to represent a set of attribute classes for attributes that represent
3589 a scalar value (bounds, pointers, ...). */
3590 enum dw_scalar_form
3591 {
3592 dw_scalar_form_constant = 0x01,
3593 dw_scalar_form_exprloc = 0x02,
3594 dw_scalar_form_reference = 0x04
3595 };
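
/* These are bit flags, so a caller that can accept several encodings
   describes them by OR-ing values together.  A hedged sketch of a call to
   one of the consumers declared below (add_scalar_info takes such a mask
   as its "int" parameter):

     add_scalar_info (die, DW_AT_upper_bound, bound,
                      dw_scalar_form_constant
                      | dw_scalar_form_exprloc
                      | dw_scalar_form_reference,
                      context);

   DIE, BOUND and CONTEXT are placeholders here, not specific objects in
   this file.  */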
3596
3597 /* Forward declarations for functions defined in this file. */
3598
3599 static int is_pseudo_reg (const_rtx);
3600 static tree type_main_variant (tree);
3601 static int is_tagged_type (const_tree);
3602 static const char *dwarf_tag_name (unsigned);
3603 static const char *dwarf_attr_name (unsigned);
3604 static const char *dwarf_form_name (unsigned);
3605 static tree decl_ultimate_origin (const_tree);
3606 static tree decl_class_context (tree);
3607 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3608 static inline enum dw_val_class AT_class (dw_attr_node *);
3609 static inline unsigned int AT_index (dw_attr_node *);
3610 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3611 static inline unsigned AT_flag (dw_attr_node *);
3612 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3613 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3614 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3615 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3616 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3617 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3618 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3619 unsigned int, unsigned char *);
3620 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3621 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3622 static inline const char *AT_string (dw_attr_node *);
3623 static enum dwarf_form AT_string_form (dw_attr_node *);
3624 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3625 static void add_AT_specification (dw_die_ref, dw_die_ref);
3626 static inline dw_die_ref AT_ref (dw_attr_node *);
3627 static inline int AT_ref_external (dw_attr_node *);
3628 static inline void set_AT_ref_external (dw_attr_node *, int);
3629 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3630 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3631 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3632 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3633 dw_loc_list_ref);
3634 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3635 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3637 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3638 static void remove_addr_table_entry (addr_table_entry *);
3639 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3640 static inline rtx AT_addr (dw_attr_node *);
3641 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3642 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3643 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3644 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3645 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3646 const char *);
3647 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3648 unsigned HOST_WIDE_INT);
3649 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3650 unsigned long, bool);
3651 static inline const char *AT_lbl (dw_attr_node *);
3652 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3653 static const char *get_AT_low_pc (dw_die_ref);
3654 static const char *get_AT_hi_pc (dw_die_ref);
3655 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3656 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3657 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3658 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3659 static bool is_cxx (void);
3660 static bool is_cxx (const_tree);
3661 static bool is_fortran (void);
3662 static bool is_ada (void);
3663 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3664 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3665 static void add_child_die (dw_die_ref, dw_die_ref);
3666 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3667 static dw_die_ref lookup_type_die (tree);
3668 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3669 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3670 static void equate_type_number_to_die (tree, dw_die_ref);
3671 static dw_die_ref lookup_decl_die (tree);
3672 static var_loc_list *lookup_decl_loc (const_tree);
3673 static void equate_decl_number_to_die (tree, dw_die_ref);
3674 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3675 static void print_spaces (FILE *);
3676 static void print_die (dw_die_ref, FILE *);
3677 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3678 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3679 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3680 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3681 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3682 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3683 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3684 struct md5_ctx *, int *);
3685 struct checksum_attributes;
3686 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3687 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3688 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3689 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3690 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3691 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3692 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3693 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3694 static int is_type_die (dw_die_ref);
3695 static int is_comdat_die (dw_die_ref);
3696 static inline bool is_template_instantiation (dw_die_ref);
3697 static int is_declaration_die (dw_die_ref);
3698 static int should_move_die_to_comdat (dw_die_ref);
3699 static dw_die_ref clone_as_declaration (dw_die_ref);
3700 static dw_die_ref clone_die (dw_die_ref);
3701 static dw_die_ref clone_tree (dw_die_ref);
3702 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3703 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3704 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3705 static dw_die_ref generate_skeleton (dw_die_ref);
3706 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3707 dw_die_ref,
3708 dw_die_ref);
3709 static void break_out_comdat_types (dw_die_ref);
3710 static void copy_decls_for_unworthy_types (dw_die_ref);
3711
3712 static void add_sibling_attributes (dw_die_ref);
3713 static void output_location_lists (dw_die_ref);
3714 static int constant_size (unsigned HOST_WIDE_INT);
3715 static unsigned long size_of_die (dw_die_ref);
3716 static void calc_die_sizes (dw_die_ref);
3717 static void calc_base_type_die_sizes (void);
3718 static void mark_dies (dw_die_ref);
3719 static void unmark_dies (dw_die_ref);
3720 static void unmark_all_dies (dw_die_ref);
3721 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3722 static unsigned long size_of_aranges (void);
3723 static enum dwarf_form value_format (dw_attr_node *);
3724 static void output_value_format (dw_attr_node *);
3725 static void output_abbrev_section (void);
3726 static void output_die_abbrevs (unsigned long, dw_die_ref);
3727 static void output_die (dw_die_ref);
3728 static void output_compilation_unit_header (enum dwarf_unit_type);
3729 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3730 static void output_comdat_type_unit (comdat_type_node *);
3731 static const char *dwarf2_name (tree, int);
3732 static void add_pubname (tree, dw_die_ref);
3733 static void add_enumerator_pubname (const char *, dw_die_ref);
3734 static void add_pubname_string (const char *, dw_die_ref);
3735 static void add_pubtype (tree, dw_die_ref);
3736 static void output_pubnames (vec<pubname_entry, va_gc> *);
3737 static void output_aranges (void);
3738 static unsigned int add_ranges (const_tree, bool = false);
3739 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3740 bool *, bool);
3741 static void output_ranges (void);
3742 static dw_line_info_table *new_line_info_table (void);
3743 static void output_line_info (bool);
3744 static void output_file_names (void);
3745 static dw_die_ref base_type_die (tree, bool);
3746 static int is_base_type (tree);
3747 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3748 static int decl_quals (const_tree);
3749 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3750 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3751 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3752 static int type_is_enum (const_tree);
3753 static unsigned int dbx_reg_number (const_rtx);
3754 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3755 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3756 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3757 enum var_init_status);
3758 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3759 enum var_init_status);
3760 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3761 enum var_init_status);
3762 static int is_based_loc (const_rtx);
3763 static bool resolve_one_addr (rtx *);
3764 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3765 enum var_init_status);
3766 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3767 enum var_init_status);
3768 struct loc_descr_context;
3769 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3770 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3771 static dw_loc_list_ref loc_list_from_tree (tree, int,
3772 struct loc_descr_context *);
3773 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3774 struct loc_descr_context *);
3775 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3776 static tree field_type (const_tree);
3777 static unsigned int simple_type_align_in_bits (const_tree);
3778 static unsigned int simple_decl_align_in_bits (const_tree);
3779 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3780 struct vlr_context;
3781 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3782 HOST_WIDE_INT *);
3783 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3784 dw_loc_list_ref);
3785 static void add_data_member_location_attribute (dw_die_ref, tree,
3786 struct vlr_context *);
3787 static bool add_const_value_attribute (dw_die_ref, rtx);
3788 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3789 static void insert_wide_int (const wide_int &, unsigned char *, int);
3790 static void insert_float (const_rtx, unsigned char *);
3791 static rtx rtl_for_decl_location (tree);
3792 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3793 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3794 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3795 static void add_name_attribute (dw_die_ref, const char *);
3796 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3797 static void add_comp_dir_attribute (dw_die_ref);
3798 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3799 struct loc_descr_context *);
3800 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3801 struct loc_descr_context *);
3802 static void add_subscript_info (dw_die_ref, tree, bool);
3803 static void add_byte_size_attribute (dw_die_ref, tree);
3804 static void add_alignment_attribute (dw_die_ref, tree);
3805 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3806 struct vlr_context *);
3807 static void add_bit_size_attribute (dw_die_ref, tree);
3808 static void add_prototyped_attribute (dw_die_ref, tree);
3809 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3810 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3811 static void add_src_coords_attributes (dw_die_ref, tree);
3812 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3813 static void add_discr_value (dw_die_ref, dw_discr_value *);
3814 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3815 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3816 static void push_decl_scope (tree);
3817 static void pop_decl_scope (void);
3818 static dw_die_ref scope_die_for (tree, dw_die_ref);
3819 static inline int local_scope_p (dw_die_ref);
3820 static inline int class_scope_p (dw_die_ref);
3821 static inline int class_or_namespace_scope_p (dw_die_ref);
3822 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3823 static void add_calling_convention_attribute (dw_die_ref, tree);
3824 static const char *type_tag (const_tree);
3825 static tree member_declared_type (const_tree);
3826 #if 0
3827 static const char *decl_start_label (tree);
3828 #endif
3829 static void gen_array_type_die (tree, dw_die_ref);
3830 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3831 #if 0
3832 static void gen_entry_point_die (tree, dw_die_ref);
3833 #endif
3834 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3835 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3836 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3837 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3838 static void gen_formal_types_die (tree, dw_die_ref);
3839 static void gen_subprogram_die (tree, dw_die_ref);
3840 static void gen_variable_die (tree, tree, dw_die_ref);
3841 static void gen_const_die (tree, dw_die_ref);
3842 static void gen_label_die (tree, dw_die_ref);
3843 static void gen_lexical_block_die (tree, dw_die_ref);
3844 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3845 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3846 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3847 static dw_die_ref gen_compile_unit_die (const char *);
3848 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3849 static void gen_member_die (tree, dw_die_ref);
3850 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3851 enum debug_info_usage);
3852 static void gen_subroutine_type_die (tree, dw_die_ref);
3853 static void gen_typedef_die (tree, dw_die_ref);
3854 static void gen_type_die (tree, dw_die_ref);
3855 static void gen_block_die (tree, dw_die_ref);
3856 static void decls_for_scope (tree, dw_die_ref);
3857 static bool is_naming_typedef_decl (const_tree);
3858 static inline dw_die_ref get_context_die (tree);
3859 static void gen_namespace_die (tree, dw_die_ref);
3860 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3861 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3862 static dw_die_ref force_decl_die (tree);
3863 static dw_die_ref force_type_die (tree);
3864 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3865 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3866 static struct dwarf_file_data * lookup_filename (const char *);
3867 static void retry_incomplete_types (void);
3868 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3869 static void gen_generic_params_dies (tree);
3870 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3871 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3872 static void splice_child_die (dw_die_ref, dw_die_ref);
3873 static int file_info_cmp (const void *, const void *);
3874 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3875 const char *, var_loc_view, const char *);
3876 static void output_loc_list (dw_loc_list_ref);
3877 static char *gen_internal_sym (const char *);
3878 static bool want_pubnames (void);
3879
3880 static void prune_unmark_dies (dw_die_ref);
3881 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3882 static void prune_unused_types_mark (dw_die_ref, int);
3883 static void prune_unused_types_walk (dw_die_ref);
3884 static void prune_unused_types_walk_attribs (dw_die_ref);
3885 static void prune_unused_types_prune (dw_die_ref);
3886 static void prune_unused_types (void);
3887 static int maybe_emit_file (struct dwarf_file_data *fd);
3888 static inline const char *AT_vms_delta1 (dw_attr_node *);
3889 static inline const char *AT_vms_delta2 (dw_attr_node *);
3890 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3891 const char *, const char *);
3892 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3893 static void gen_remaining_tmpl_value_param_die_attribute (void);
3894 static bool generic_type_p (tree);
3895 static void schedule_generic_params_dies_gen (tree t);
3896 static void gen_scheduled_generic_parms_dies (void);
3897 static void resolve_variable_values (void);
3898
3899 static const char *comp_dir_string (void);
3900
3901 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3902
3903 /* enum for tracking thread-local variables whose address is really an offset
3904 relative to the TLS pointer, which will need link-time relocation, but will
3905 not need relocation by the DWARF consumer. */
3906
3907 enum dtprel_bool
3908 {
3909 dtprel_false = 0,
3910 dtprel_true = 1
3911 };
3912
3913 /* Return the operator to use for an address of a variable. For dtprel_true, we
3914 use DW_OP_const*. For regular variables, which need both link-time
3915 relocation and consumer-level relocation (e.g., to account for shared objects
3916 loaded at a random address), we use DW_OP_addr*. */
3917
3918 static inline enum dwarf_location_atom
3919 dw_addr_op (enum dtprel_bool dtprel)
3920 {
3921 if (dtprel == dtprel_true)
3922 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3923 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3924 else
3925 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3926 }
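
/* Spelled out, the possible results of dw_addr_op are:

     dtprel_true,  split DWARF               -> dwarf_OP (DW_OP_constx)
     dtprel_true,  no split, 4-byte address  -> DW_OP_const4u
     dtprel_true,  no split, otherwise       -> DW_OP_const8u
     dtprel_false, split DWARF               -> dwarf_OP (DW_OP_addrx)
     dtprel_false, no split                  -> DW_OP_addr

   dwarf_OP is expected to map the DWARF 5 DW_OP_addrx/DW_OP_constx opcodes
   to the equivalent GNU extension opcodes when an earlier DWARF version is
   being emitted.  */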
3927
3928 /* Return a pointer to a newly allocated address location description. If
3929 dwarf_split_debug_info is true, then record the address with the appropriate
3930 relocation. */
3931 static inline dw_loc_descr_ref
3932 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3933 {
3934 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3935
3936 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3937 ref->dw_loc_oprnd1.v.val_addr = addr;
3938 ref->dtprel = dtprel;
3939 if (dwarf_split_debug_info)
3940 ref->dw_loc_oprnd1.val_entry
3941 = add_addr_table_entry (addr,
3942 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3943 else
3944 ref->dw_loc_oprnd1.val_entry = NULL;
3945
3946 return ref;
3947 }
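
/* A hedged usage sketch: to build a location expression that pushes the
   link-time address of a variable whose DECL_RTL is a MEM of a SYMBOL_REF,
   a caller could do something like

     dw_loc_descr_ref loc
       = new_addr_loc_descr (XEXP (DECL_RTL (decl), 0), dtprel_false);

   With -gsplit-dwarf the address is routed through the .debug_addr table
   (see add_addr_table_entry below); otherwise it is emitted directly as a
   DW_OP_addr operand.  DECL stands for a hypothetical VAR_DECL.  */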
3948
3949 /* Section names used to hold DWARF debugging information. */
3950
3951 #ifndef DEBUG_INFO_SECTION
3952 #define DEBUG_INFO_SECTION ".debug_info"
3953 #endif
3954 #ifndef DEBUG_DWO_INFO_SECTION
3955 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3956 #endif
3957 #ifndef DEBUG_LTO_INFO_SECTION
3958 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3959 #endif
3960 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3961 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3962 #endif
3963 #ifndef DEBUG_ABBREV_SECTION
3964 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3965 #endif
3966 #ifndef DEBUG_LTO_ABBREV_SECTION
3967 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3968 #endif
3969 #ifndef DEBUG_DWO_ABBREV_SECTION
3970 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3971 #endif
3972 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3973 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3974 #endif
3975 #ifndef DEBUG_ARANGES_SECTION
3976 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3977 #endif
3978 #ifndef DEBUG_ADDR_SECTION
3979 #define DEBUG_ADDR_SECTION ".debug_addr"
3980 #endif
3981 #ifndef DEBUG_MACINFO_SECTION
3982 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3983 #endif
3984 #ifndef DEBUG_LTO_MACINFO_SECTION
3985 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3986 #endif
3987 #ifndef DEBUG_DWO_MACINFO_SECTION
3988 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
3989 #endif
3990 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
3991 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
3992 #endif
3993 #ifndef DEBUG_MACRO_SECTION
3994 #define DEBUG_MACRO_SECTION ".debug_macro"
3995 #endif
3996 #ifndef DEBUG_LTO_MACRO_SECTION
3997 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
3998 #endif
3999 #ifndef DEBUG_DWO_MACRO_SECTION
4000 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4001 #endif
4002 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4003 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4004 #endif
4005 #ifndef DEBUG_LINE_SECTION
4006 #define DEBUG_LINE_SECTION ".debug_line"
4007 #endif
4008 #ifndef DEBUG_LTO_LINE_SECTION
4009 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4010 #endif
4011 #ifndef DEBUG_DWO_LINE_SECTION
4012 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4013 #endif
4014 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4015 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4016 #endif
4017 #ifndef DEBUG_LOC_SECTION
4018 #define DEBUG_LOC_SECTION ".debug_loc"
4019 #endif
4020 #ifndef DEBUG_DWO_LOC_SECTION
4021 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4022 #endif
4023 #ifndef DEBUG_LOCLISTS_SECTION
4024 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4025 #endif
4026 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4027 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4028 #endif
4029 #ifndef DEBUG_PUBNAMES_SECTION
4030 #define DEBUG_PUBNAMES_SECTION \
4031 ((debug_generate_pub_sections == 2) \
4032 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4033 #endif
4034 #ifndef DEBUG_PUBTYPES_SECTION
4035 #define DEBUG_PUBTYPES_SECTION \
4036 ((debug_generate_pub_sections == 2) \
4037 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4038 #endif
4039 #ifndef DEBUG_STR_OFFSETS_SECTION
4040 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4041 #endif
4042 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4043 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4044 #endif
4045 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4046 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4047 #endif
4048 #ifndef DEBUG_STR_SECTION
4049 #define DEBUG_STR_SECTION ".debug_str"
4050 #endif
4051 #ifndef DEBUG_LTO_STR_SECTION
4052 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4053 #endif
4054 #ifndef DEBUG_STR_DWO_SECTION
4055 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4056 #endif
4057 #ifndef DEBUG_LTO_STR_DWO_SECTION
4058 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4059 #endif
4060 #ifndef DEBUG_RANGES_SECTION
4061 #define DEBUG_RANGES_SECTION ".debug_ranges"
4062 #endif
4063 #ifndef DEBUG_RNGLISTS_SECTION
4064 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4065 #endif
4066 #ifndef DEBUG_LINE_STR_SECTION
4067 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4068 #endif
4069 #ifndef DEBUG_LTO_LINE_STR_SECTION
4070 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4071 #endif
4072
4073 /* Standard ELF section names for compiled code and data. */
4074 #ifndef TEXT_SECTION_NAME
4075 #define TEXT_SECTION_NAME ".text"
4076 #endif
4077
4078 /* Section flags for .debug_str section. */
4079 #define DEBUG_STR_SECTION_FLAGS \
4080 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4081 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4082 : SECTION_DEBUG)
4083
4084 /* Section flags for .debug_str.dwo section. */
4085 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4086
4087 /* Attribute used to refer to the macro section. */
4088 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4089 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
4090
4091 /* Labels we insert at the beginning of sections so that we can reference
4092 them instead of the section names themselves. */
4093
4094 #ifndef TEXT_SECTION_LABEL
4095 #define TEXT_SECTION_LABEL "Ltext"
4096 #endif
4097 #ifndef COLD_TEXT_SECTION_LABEL
4098 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4099 #endif
4100 #ifndef DEBUG_LINE_SECTION_LABEL
4101 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4102 #endif
4103 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4104 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4105 #endif
4106 #ifndef DEBUG_INFO_SECTION_LABEL
4107 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4108 #endif
4109 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4110 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4111 #endif
4112 #ifndef DEBUG_ABBREV_SECTION_LABEL
4113 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4114 #endif
4115 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4116 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4117 #endif
4118 #ifndef DEBUG_ADDR_SECTION_LABEL
4119 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4120 #endif
4121 #ifndef DEBUG_LOC_SECTION_LABEL
4122 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4123 #endif
4124 #ifndef DEBUG_RANGES_SECTION_LABEL
4125 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4126 #endif
4127 #ifndef DEBUG_MACINFO_SECTION_LABEL
4128 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4129 #endif
4130 #ifndef DEBUG_MACRO_SECTION_LABEL
4131 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4132 #endif
4133 #define SKELETON_COMP_DIE_ABBREV 1
4134 #define SKELETON_TYPE_DIE_ABBREV 2
4135
4136 /* Definitions of defaults for formats and names of various special
4137 (artificial) labels which may be generated within this file (when the -g
4138 option is used and DWARF2_DEBUGGING_INFO is in effect).
4139 If necessary, these may be overridden from within the tm.h file, but
4140 typically, overriding these defaults is unnecessary. */
4141
4142 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4143 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4144 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4145 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4146 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4147 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4148 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4149 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4150 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4151 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4152 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4153 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4154 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4155 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4156 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4157
4158 #ifndef TEXT_END_LABEL
4159 #define TEXT_END_LABEL "Letext"
4160 #endif
4161 #ifndef COLD_END_LABEL
4162 #define COLD_END_LABEL "Letext_cold"
4163 #endif
4164 #ifndef BLOCK_BEGIN_LABEL
4165 #define BLOCK_BEGIN_LABEL "LBB"
4166 #endif
4167 #ifndef BLOCK_INLINE_ENTRY_LABEL
4168 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4169 #endif
4170 #ifndef BLOCK_END_LABEL
4171 #define BLOCK_END_LABEL "LBE"
4172 #endif
4173 #ifndef LINE_CODE_LABEL
4174 #define LINE_CODE_LABEL "LM"
4175 #endif
4176
4177 \f
4178 /* Return the root of the DIEs built for the current compilation unit. */
4179 static dw_die_ref
4180 comp_unit_die (void)
4181 {
4182 if (!single_comp_unit_die)
4183 single_comp_unit_die = gen_compile_unit_die (NULL);
4184 return single_comp_unit_die;
4185 }
4186
4187 /* We allow a language front-end to designate a function that is to be
4188 called to "demangle" any name before it is put into a DIE. */
4189
4190 static const char *(*demangle_name_func) (const char *);
4191
4192 void
4193 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4194 {
4195 demangle_name_func = func;
4196 }
4197
4198 /* Test if rtl node points to a pseudo register. */
4199
4200 static inline int
4201 is_pseudo_reg (const_rtx rtl)
4202 {
4203 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4204 || (GET_CODE (rtl) == SUBREG
4205 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4206 }
4207
4208 /* Return a reference to a type, with its const and volatile qualifiers
4209 removed. */
4210
4211 static inline tree
4212 type_main_variant (tree type)
4213 {
4214 type = TYPE_MAIN_VARIANT (type);
4215
4216 /* ??? There really should be only one main variant among any group of
4217 variants of a given type (and all of the MAIN_VARIANT values for all
4218 members of the group should point to that one type) but sometimes the C
4219 front-end messes this up for array types, so we work around that bug
4220 here. */
4221 if (TREE_CODE (type) == ARRAY_TYPE)
4222 while (type != TYPE_MAIN_VARIANT (type))
4223 type = TYPE_MAIN_VARIANT (type);
4224
4225 return type;
4226 }
4227
4228 /* Return nonzero if the given type node represents a tagged type. */
4229
4230 static inline int
4231 is_tagged_type (const_tree type)
4232 {
4233 enum tree_code code = TREE_CODE (type);
4234
4235 return (code == RECORD_TYPE || code == UNION_TYPE
4236 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4237 }
4238
4239 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4240
4241 static void
4242 get_ref_die_offset_label (char *label, dw_die_ref ref)
4243 {
4244 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4245 }
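
/* For example, if debug_info_section_label is "Ldebug_info0" and the DIE
   sits at offset 42 within .debug_info, LABEL is set to "Ldebug_info0+42"
   (the offset is printed in decimal).  The exact prefix depends on how the
   section label was generated.  */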
4246
4247 /* Return die_offset of a DIE reference to a base type. */
4248
4249 static unsigned long int
4250 get_base_type_offset (dw_die_ref ref)
4251 {
4252 if (ref->die_offset)
4253 return ref->die_offset;
4254 if (comp_unit_die ()->die_abbrev)
4255 {
4256 calc_base_type_die_sizes ();
4257 gcc_assert (ref->die_offset);
4258 }
4259 return ref->die_offset;
4260 }
4261
4262 /* Return die_offset of a DIE reference other than base type. */
4263
4264 static unsigned long int
4265 get_ref_die_offset (dw_die_ref ref)
4266 {
4267 gcc_assert (ref->die_offset);
4268 return ref->die_offset;
4269 }
4270
4271 /* Convert a DIE tag into its string name. */
4272
4273 static const char *
4274 dwarf_tag_name (unsigned int tag)
4275 {
4276 const char *name = get_DW_TAG_name (tag);
4277
4278 if (name != NULL)
4279 return name;
4280
4281 return "DW_TAG_<unknown>";
4282 }
4283
4284 /* Convert a DWARF attribute code into its string name. */
4285
4286 static const char *
4287 dwarf_attr_name (unsigned int attr)
4288 {
4289 const char *name;
4290
4291 switch (attr)
4292 {
4293 #if VMS_DEBUGGING_INFO
4294 case DW_AT_HP_prologue:
4295 return "DW_AT_HP_prologue";
4296 #else
4297 case DW_AT_MIPS_loop_unroll_factor:
4298 return "DW_AT_MIPS_loop_unroll_factor";
4299 #endif
4300
4301 #if VMS_DEBUGGING_INFO
4302 case DW_AT_HP_epilogue:
4303 return "DW_AT_HP_epilogue";
4304 #else
4305 case DW_AT_MIPS_stride:
4306 return "DW_AT_MIPS_stride";
4307 #endif
4308 }
4309
4310 name = get_DW_AT_name (attr);
4311
4312 if (name != NULL)
4313 return name;
4314
4315 return "DW_AT_<unknown>";
4316 }
4317
4318 /* Convert a DWARF value form code into its string name. */
4319
4320 static const char *
4321 dwarf_form_name (unsigned int form)
4322 {
4323 const char *name = get_DW_FORM_name (form);
4324
4325 if (name != NULL)
4326 return name;
4327
4328 return "DW_FORM_<unknown>";
4329 }
4330 \f
4331 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4332 instance of an inlined instance of a decl which is local to an inline
4333 function, so we have to trace all of the way back through the origin chain
4334 to find out what sort of node actually served as the original seed for the
4335 given block. */
4336
4337 static tree
4338 decl_ultimate_origin (const_tree decl)
4339 {
4340 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4341 return NULL_TREE;
4342
4343 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4344 we're trying to output the abstract instance of this function. */
4345 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4346 return NULL_TREE;
4347
4348 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4349 most distant ancestor, this should never happen. */
4350 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4351
4352 return DECL_ABSTRACT_ORIGIN (decl);
4353 }
4354
4355 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4356 of a virtual function may refer to a base class, so we check the 'this'
4357 parameter. */
4358
4359 static tree
4360 decl_class_context (tree decl)
4361 {
4362 tree context = NULL_TREE;
4363
4364 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4365 context = DECL_CONTEXT (decl);
4366 else
4367 context = TYPE_MAIN_VARIANT
4368 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4369
4370 if (context && !TYPE_P (context))
4371 context = NULL_TREE;
4372
4373 return context;
4374 }
4375 \f
4376 /* Add an attribute/value pair to a DIE. */
4377
4378 static inline void
4379 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4380 {
4381 /* Maybe this should be an assert? */
4382 if (die == NULL)
4383 return;
4384
4385 if (flag_checking)
4386 {
4387 /* Check we do not add duplicate attrs. Can't use get_AT here
4388 because that recurses to the specification/abstract origin DIE. */
4389 dw_attr_node *a;
4390 unsigned ix;
4391 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4392 gcc_assert (a->dw_attr != attr->dw_attr);
4393 }
4394
4395 vec_safe_reserve (die->die_attr, 1);
4396 vec_safe_push (die->die_attr, *attr);
4397 }
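
/* Every add_AT_* helper below follows the same pattern: build a
   dw_attr_node on the stack, fill in its value class and payload, and hand
   it to add_dwarf_attr, which copies it into the DIE's attribute vector.
   A minimal sketch, equivalent to add_AT_flag (die, DW_AT_artificial, 1):

     dw_attr_node attr;
     attr.dw_attr = DW_AT_artificial;
     attr.dw_attr_val.val_class = dw_val_class_flag;
     attr.dw_attr_val.val_entry = NULL;
     attr.dw_attr_val.v.val_flag = 1;
     add_dwarf_attr (die, &attr);  */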
4398
4399 static inline enum dw_val_class
4400 AT_class (dw_attr_node *a)
4401 {
4402 return a->dw_attr_val.val_class;
4403 }
4404
4405 /* Return the index for any attribute that will be referenced with a
4406 DW_FORM_GNU_addr_index or DW_FORM_GNU_str_index. String indices
4407 are stored in dw_attr_val.v.val_str for reference counting
4408 pruning. */
4409
4410 static inline unsigned int
4411 AT_index (dw_attr_node *a)
4412 {
4413 if (AT_class (a) == dw_val_class_str)
4414 return a->dw_attr_val.v.val_str->index;
4415 else if (a->dw_attr_val.val_entry != NULL)
4416 return a->dw_attr_val.val_entry->index;
4417 return NOT_INDEXED;
4418 }
4419
4420 /* Add a flag value attribute to a DIE. */
4421
4422 static inline void
4423 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4424 {
4425 dw_attr_node attr;
4426
4427 attr.dw_attr = attr_kind;
4428 attr.dw_attr_val.val_class = dw_val_class_flag;
4429 attr.dw_attr_val.val_entry = NULL;
4430 attr.dw_attr_val.v.val_flag = flag;
4431 add_dwarf_attr (die, &attr);
4432 }
4433
4434 static inline unsigned
4435 AT_flag (dw_attr_node *a)
4436 {
4437 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4438 return a->dw_attr_val.v.val_flag;
4439 }
4440
4441 /* Add a signed integer attribute value to a DIE. */
4442
4443 static inline void
4444 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4445 {
4446 dw_attr_node attr;
4447
4448 attr.dw_attr = attr_kind;
4449 attr.dw_attr_val.val_class = dw_val_class_const;
4450 attr.dw_attr_val.val_entry = NULL;
4451 attr.dw_attr_val.v.val_int = int_val;
4452 add_dwarf_attr (die, &attr);
4453 }
4454
4455 static inline HOST_WIDE_INT
4456 AT_int (dw_attr_node *a)
4457 {
4458 gcc_assert (a && (AT_class (a) == dw_val_class_const
4459 || AT_class (a) == dw_val_class_const_implicit));
4460 return a->dw_attr_val.v.val_int;
4461 }
4462
4463 /* Add an unsigned integer attribute value to a DIE. */
4464
4465 static inline void
4466 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4467 unsigned HOST_WIDE_INT unsigned_val)
4468 {
4469 dw_attr_node attr;
4470
4471 attr.dw_attr = attr_kind;
4472 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4473 attr.dw_attr_val.val_entry = NULL;
4474 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4475 add_dwarf_attr (die, &attr);
4476 }
4477
4478 static inline unsigned HOST_WIDE_INT
4479 AT_unsigned (dw_attr_node *a)
4480 {
4481 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4482 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4483 return a->dw_attr_val.v.val_unsigned;
4484 }
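
/* A hedged sketch of the add/read round trip for the accessors above:

     add_AT_unsigned (die, DW_AT_byte_size, 4);
     dw_attr_node *a = get_AT (die, DW_AT_byte_size);
     if (a && AT_class (a) == dw_val_class_unsigned_const)
       size = AT_unsigned (a);

   DIE and SIZE are placeholders; DW_AT_byte_size is merely an example of
   an attribute that commonly carries an unsigned constant.  */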
4485
4486 /* Add an unsigned wide integer attribute value to a DIE. */
4487
4488 static inline void
4489 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4490 const wide_int& w)
4491 {
4492 dw_attr_node attr;
4493
4494 attr.dw_attr = attr_kind;
4495 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4496 attr.dw_attr_val.val_entry = NULL;
4497 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4498 *attr.dw_attr_val.v.val_wide = w;
4499 add_dwarf_attr (die, &attr);
4500 }
4501
4502 /* Add an unsigned double integer attribute value to a DIE. */
4503
4504 static inline void
4505 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4506 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4507 {
4508 dw_attr_node attr;
4509
4510 attr.dw_attr = attr_kind;
4511 attr.dw_attr_val.val_class = dw_val_class_const_double;
4512 attr.dw_attr_val.val_entry = NULL;
4513 attr.dw_attr_val.v.val_double.high = high;
4514 attr.dw_attr_val.v.val_double.low = low;
4515 add_dwarf_attr (die, &attr);
4516 }
4517
4518 /* Add a floating point attribute value to a DIE. */
4519
4520 static inline void
4521 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4522 unsigned int length, unsigned int elt_size, unsigned char *array)
4523 {
4524 dw_attr_node attr;
4525
4526 attr.dw_attr = attr_kind;
4527 attr.dw_attr_val.val_class = dw_val_class_vec;
4528 attr.dw_attr_val.val_entry = NULL;
4529 attr.dw_attr_val.v.val_vec.length = length;
4530 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4531 attr.dw_attr_val.v.val_vec.array = array;
4532 add_dwarf_attr (die, &attr);
4533 }
4534
4535 /* Add an 8-byte data attribute value to a DIE. */
4536
4537 static inline void
4538 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4539 unsigned char data8[8])
4540 {
4541 dw_attr_node attr;
4542
4543 attr.dw_attr = attr_kind;
4544 attr.dw_attr_val.val_class = dw_val_class_data8;
4545 attr.dw_attr_val.val_entry = NULL;
4546 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4547 add_dwarf_attr (die, &attr);
4548 }
4549
4550 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4551 dwarf_split_debug_info, address attributes in dies destined for the
4552 final executable have force_direct set to avoid using indexed
4553 references. */
4554
4555 static inline void
4556 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4557 bool force_direct)
4558 {
4559 dw_attr_node attr;
4560 char * lbl_id;
4561
4562 lbl_id = xstrdup (lbl_low);
4563 attr.dw_attr = DW_AT_low_pc;
4564 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4565 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4566 if (dwarf_split_debug_info && !force_direct)
4567 attr.dw_attr_val.val_entry
4568 = add_addr_table_entry (lbl_id, ate_kind_label);
4569 else
4570 attr.dw_attr_val.val_entry = NULL;
4571 add_dwarf_attr (die, &attr);
4572
4573 attr.dw_attr = DW_AT_high_pc;
4574 if (dwarf_version < 4)
4575 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4576 else
4577 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4578 lbl_id = xstrdup (lbl_high);
4579 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4580 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4581 && dwarf_split_debug_info && !force_direct)
4582 attr.dw_attr_val.val_entry
4583 = add_addr_table_entry (lbl_id, ate_kind_label);
4584 else
4585 attr.dw_attr_val.val_entry = NULL;
4586 add_dwarf_attr (die, &attr);
4587 }
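
/* A hedged usage sketch: when the bounds of a subprogram are known as
   assembler labels, its DIE gets a code range with something like

     add_AT_low_high_pc (subr_die, begin_label, end_label, false);

   With -gsplit-dwarf and force_direct false, both labels are routed
   through the address table; for DWARF 4 and later the high-pc value is
   later output as an offset from the low pc (dw_val_class_high_pc) rather
   than as a second address.  SUBR_DIE, BEGIN_LABEL and END_LABEL are
   placeholders.  */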
4588
4589 /* Hash and equality functions for debug_str_hash. */
4590
4591 hashval_t
4592 indirect_string_hasher::hash (indirect_string_node *x)
4593 {
4594 return htab_hash_string (x->str);
4595 }
4596
4597 bool
4598 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4599 {
4600 return strcmp (x1->str, x2) == 0;
4601 }
4602
4603 /* Find or add STR in the given string hash table and increment its reference count. */
4604
4605 static struct indirect_string_node *
4606 find_AT_string_in_table (const char *str,
4607 hash_table<indirect_string_hasher> *table)
4608 {
4609 struct indirect_string_node *node;
4610
4611 indirect_string_node **slot
4612 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4613 if (*slot == NULL)
4614 {
4615 node = ggc_cleared_alloc<indirect_string_node> ();
4616 node->str = ggc_strdup (str);
4617 *slot = node;
4618 }
4619 else
4620 node = *slot;
4621
4622 node->refcount++;
4623 return node;
4624 }
4625
4626 /* Find or add STR in the indirect string hash table, creating the table if necessary. */
4627
4628 static struct indirect_string_node *
4629 find_AT_string (const char *str)
4630 {
4631 if (! debug_str_hash)
4632 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4633
4634 return find_AT_string_in_table (str, debug_str_hash);
4635 }
4636
4637 /* Add a string attribute value to a DIE. */
4638
4639 static inline void
4640 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4641 {
4642 dw_attr_node attr;
4643 struct indirect_string_node *node;
4644
4645 node = find_AT_string (str);
4646
4647 attr.dw_attr = attr_kind;
4648 attr.dw_attr_val.val_class = dw_val_class_str;
4649 attr.dw_attr_val.val_entry = NULL;
4650 attr.dw_attr_val.v.val_str = node;
4651 add_dwarf_attr (die, &attr);
4652 }
4653
4654 static inline const char *
4655 AT_string (dw_attr_node *a)
4656 {
4657 gcc_assert (a && AT_class (a) == dw_val_class_str);
4658 return a->dw_attr_val.v.val_str->str;
4659 }
4660
4661 /* Call this function directly to bypass AT_string_form's logic, which
4662 might otherwise put the string inline in the DIE. */
4663
4664 static void
4665 set_indirect_string (struct indirect_string_node *node)
4666 {
4667 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4668 /* Already indirect is a no op. */
4669 if (node->form == DW_FORM_strp
4670 || node->form == DW_FORM_line_strp
4671 || node->form == DW_FORM_GNU_str_index)
4672 {
4673 gcc_assert (node->label);
4674 return;
4675 }
4676 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4677 ++dw2_string_counter;
4678 node->label = xstrdup (label);
4679
4680 if (!dwarf_split_debug_info)
4681 {
4682 node->form = DW_FORM_strp;
4683 node->index = NOT_INDEXED;
4684 }
4685 else
4686 {
4687 node->form = DW_FORM_GNU_str_index;
4688 node->index = NO_INDEX_ASSIGNED;
4689 }
4690 }
4691
4692 /* A helper function for dwarf2out_finish, called to reset indirect
4693 string decisions done for early LTO dwarf output before fat object
4694 dwarf output. */
4695
4696 int
4697 reset_indirect_string (indirect_string_node **h, void *)
4698 {
4699 struct indirect_string_node *node = *h;
4700 if (node->form == DW_FORM_strp || node->form == DW_FORM_GNU_str_index)
4701 {
4702 free (node->label);
4703 node->label = NULL;
4704 node->form = (dwarf_form) 0;
4705 node->index = 0;
4706 }
4707 return 1;
4708 }
4709
4710 /* Find out whether a string should be output inline in DIE
4711 or out-of-line in .debug_str section. */
4712
4713 static enum dwarf_form
4714 find_string_form (struct indirect_string_node *node)
4715 {
4716 unsigned int len;
4717
4718 if (node->form)
4719 return node->form;
4720
4721 len = strlen (node->str) + 1;
4722
4723 /* If the string is shorter or equal to the size of the reference, it is
4724 always better to put it inline. */
4725 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4726 return node->form = DW_FORM_string;
4727
4728 /* If we cannot expect the linker to merge strings in .debug_str
4729 section, only put it into .debug_str if it is worth even in this
4730 single module. */
4731 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4732 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4733 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4734 return node->form = DW_FORM_string;
4735
4736 set_indirect_string (node);
4737
4738 return node->form;
4739 }
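
/* A worked example of the heuristic above, assuming DWARF_OFFSET_SIZE is 4:
   the string "abc" occupies len == 4 bytes including the terminating NUL,
   no more than a 4-byte .debug_str reference, so it stays inline as
   DW_FORM_string.  On a target without string merging, a 20-byte string
   referenced once also stays inline, since (20 - 4) * 1 <= 20; referenced
   twice it fails the test ((20 - 4) * 2 > 20) and is sent to .debug_str
   via set_indirect_string.  */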
4740
4741 /* Find out whether the string referenced from the attribute should be
4742 output inline in DIE or out-of-line in .debug_str section. */
4743
4744 static enum dwarf_form
4745 AT_string_form (dw_attr_node *a)
4746 {
4747 gcc_assert (a && AT_class (a) == dw_val_class_str);
4748 return find_string_form (a->dw_attr_val.v.val_str);
4749 }
4750
4751 /* Add a DIE reference attribute value to a DIE. */
4752
4753 static inline void
4754 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4755 {
4756 dw_attr_node attr;
4757 gcc_checking_assert (targ_die != NULL);
4758
4759 /* With LTO we can end up trying to reference something we didn't create
4760 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4761 if (targ_die == NULL)
4762 return;
4763
4764 attr.dw_attr = attr_kind;
4765 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4766 attr.dw_attr_val.val_entry = NULL;
4767 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4768 attr.dw_attr_val.v.val_die_ref.external = 0;
4769 add_dwarf_attr (die, &attr);
4770 }
4771
4772 /* Change DIE reference REF to point to NEW_DIE instead. */
4773
4774 static inline void
4775 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4776 {
4777 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4778 ref->dw_attr_val.v.val_die_ref.die = new_die;
4779 ref->dw_attr_val.v.val_die_ref.external = 0;
4780 }
4781
4782 /* Add an AT_specification attribute to a DIE, and also make the back
4783 pointer from the specification to the definition. */
4784
4785 static inline void
4786 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4787 {
4788 add_AT_die_ref (die, DW_AT_specification, targ_die);
4789 gcc_assert (!targ_die->die_definition);
4790 targ_die->die_definition = die;
4791 }
4792
4793 static inline dw_die_ref
4794 AT_ref (dw_attr_node *a)
4795 {
4796 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4797 return a->dw_attr_val.v.val_die_ref.die;
4798 }
4799
4800 static inline int
4801 AT_ref_external (dw_attr_node *a)
4802 {
4803 if (a && AT_class (a) == dw_val_class_die_ref)
4804 return a->dw_attr_val.v.val_die_ref.external;
4805
4806 return 0;
4807 }
4808
4809 static inline void
4810 set_AT_ref_external (dw_attr_node *a, int i)
4811 {
4812 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4813 a->dw_attr_val.v.val_die_ref.external = i;
4814 }
4815
4816 /* Add an FDE reference attribute value to a DIE. */
4817
4818 static inline void
4819 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4820 {
4821 dw_attr_node attr;
4822
4823 attr.dw_attr = attr_kind;
4824 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4825 attr.dw_attr_val.val_entry = NULL;
4826 attr.dw_attr_val.v.val_fde_index = targ_fde;
4827 add_dwarf_attr (die, &attr);
4828 }
4829
4830 /* Add a location description attribute value to a DIE. */
4831
4832 static inline void
4833 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4834 {
4835 dw_attr_node attr;
4836
4837 attr.dw_attr = attr_kind;
4838 attr.dw_attr_val.val_class = dw_val_class_loc;
4839 attr.dw_attr_val.val_entry = NULL;
4840 attr.dw_attr_val.v.val_loc = loc;
4841 add_dwarf_attr (die, &attr);
4842 }
4843
4844 static inline dw_loc_descr_ref
4845 AT_loc (dw_attr_node *a)
4846 {
4847 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4848 return a->dw_attr_val.v.val_loc;
4849 }
4850
4851 static inline void
4852 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4853 {
4854 dw_attr_node attr;
4855
4856 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4857 return;
4858
4859 attr.dw_attr = attr_kind;
4860 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4861 attr.dw_attr_val.val_entry = NULL;
4862 attr.dw_attr_val.v.val_loc_list = loc_list;
4863 add_dwarf_attr (die, &attr);
4864 have_location_lists = true;
4865 }
4866
4867 static inline dw_loc_list_ref
4868 AT_loc_list (dw_attr_node *a)
4869 {
4870 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4871 return a->dw_attr_val.v.val_loc_list;
4872 }
4873
4874 /* Add a view list attribute to DIE. It must have a DW_AT_location
4875 attribute, because the view list complements the location list. */
4876
4877 static inline void
4878 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4879 {
4880 dw_attr_node attr;
4881
4882 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4883 return;
4884
4885 attr.dw_attr = attr_kind;
4886 attr.dw_attr_val.val_class = dw_val_class_view_list;
4887 attr.dw_attr_val.val_entry = NULL;
4888 attr.dw_attr_val.v.val_view_list = die;
4889 add_dwarf_attr (die, &attr);
4890 gcc_checking_assert (get_AT (die, DW_AT_location));
4891 gcc_assert (have_location_lists);
4892 }
4893
4894 /* Return a pointer to the location list referenced by the attribute.
4895 If the named attribute is a view list, look up the corresponding
4896 DW_AT_location attribute and return its location list. */
4897
4898 static inline dw_loc_list_ref *
4899 AT_loc_list_ptr (dw_attr_node *a)
4900 {
4901 gcc_assert (a);
4902 switch (AT_class (a))
4903 {
4904 case dw_val_class_loc_list:
4905 return &a->dw_attr_val.v.val_loc_list;
4906 case dw_val_class_view_list:
4907 {
4908 dw_attr_node *l;
4909 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4910 if (!l)
4911 return NULL;
4912 gcc_checking_assert (l + 1 == a);
4913 return AT_loc_list_ptr (l);
4914 }
4915 default:
4916 gcc_unreachable ();
4917 }
4918 }
4919
4920 /* Return the location attribute value associated with a view list
4921 attribute value. */
4922
4923 static inline dw_val_node *
4924 view_list_to_loc_list_val_node (dw_val_node *val)
4925 {
4926 gcc_assert (val->val_class == dw_val_class_view_list);
4927 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4928 if (!loc)
4929 return NULL;
4930 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4931 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4932 return &loc->dw_attr_val;
4933 }
4934
4935 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4936 {
4937 static hashval_t hash (addr_table_entry *);
4938 static bool equal (addr_table_entry *, addr_table_entry *);
4939 };
4940
4941 /* Table of entries into the .debug_addr section. */
4942
4943 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4944
4945 /* Hash an address_table_entry. */
4946
4947 hashval_t
4948 addr_hasher::hash (addr_table_entry *a)
4949 {
4950 inchash::hash hstate;
4951 switch (a->kind)
4952 {
4953 case ate_kind_rtx:
4954 hstate.add_int (0);
4955 break;
4956 case ate_kind_rtx_dtprel:
4957 hstate.add_int (1);
4958 break;
4959 case ate_kind_label:
4960 return htab_hash_string (a->addr.label);
4961 default:
4962 gcc_unreachable ();
4963 }
4964 inchash::add_rtx (a->addr.rtl, hstate);
4965 return hstate.end ();
4966 }
4967
4968 /* Determine equality for two address_table_entries. */
4969
4970 bool
4971 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4972 {
4973 if (a1->kind != a2->kind)
4974 return 0;
4975 switch (a1->kind)
4976 {
4977 case ate_kind_rtx:
4978 case ate_kind_rtx_dtprel:
4979 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4980 case ate_kind_label:
4981 return strcmp (a1->addr.label, a2->addr.label) == 0;
4982 default:
4983 gcc_unreachable ();
4984 }
4985 }
4986
4987 /* Initialize an addr_table_entry. */
4988
4989 void
4990 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4991 {
4992 e->kind = kind;
4993 switch (kind)
4994 {
4995 case ate_kind_rtx:
4996 case ate_kind_rtx_dtprel:
4997 e->addr.rtl = (rtx) addr;
4998 break;
4999 case ate_kind_label:
5000 e->addr.label = (char *) addr;
5001 break;
5002 }
5003 e->refcount = 0;
5004 e->index = NO_INDEX_ASSIGNED;
5005 }
5006
5007 /* Find or create an address table entry for ADDR of kind KIND and
5008 increment its reference count. Defer setting an index until output time. */
5009
5010 static addr_table_entry *
5011 add_addr_table_entry (void *addr, enum ate_kind kind)
5012 {
5013 addr_table_entry *node;
5014 addr_table_entry finder;
5015
5016 gcc_assert (dwarf_split_debug_info);
5017 if (! addr_index_table)
5018 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5019 init_addr_table_entry (&finder, kind, addr);
5020 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5021
5022 if (*slot == HTAB_EMPTY_ENTRY)
5023 {
5024 node = ggc_cleared_alloc<addr_table_entry> ();
5025 init_addr_table_entry (node, kind, addr);
5026 *slot = node;
5027 }
5028 else
5029 node = *slot;
5030
5031 node->refcount++;
5032 return node;
5033 }
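
/* A hedged sketch of the intended call pattern: registering the same
   address twice yields the same shared entry with its reference count
   bumped each time,

     addr_table_entry *e1 = add_addr_table_entry (lbl, ate_kind_label);
     addr_table_entry *e2 = add_addr_table_entry (lbl, ate_kind_label);
     (here e1 == e2 and e1->refcount has grown by 2)

   so remove_addr_table_entry below only drops a reference rather than
   deleting the slot.  LBL stands for a hypothetical label string.  */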
5034
5035 /* Remove an entry from the addr table by decrementing its refcount.
5036 Strictly, decrementing the refcount would be enough, but the
5037 assertion that the entry is actually in the table has found
5038 bugs. */
5039
5040 static void
5041 remove_addr_table_entry (addr_table_entry *entry)
5042 {
5043 gcc_assert (dwarf_split_debug_info && addr_index_table);
5044 /* After an index is assigned, the table is frozen. */
5045 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5046 entry->refcount--;
5047 }
5048
5049 /* Given a location list, remove all addresses it refers to from the
5050 address_table. */
5051
5052 static void
5053 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5054 {
5055 for (; descr; descr = descr->dw_loc_next)
5056 if (descr->dw_loc_oprnd1.val_entry != NULL)
5057 {
5058 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5059 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5060 }
5061 }
5062
5063 /* A helper function for dwarf2out_finish called through
5064 htab_traverse. Assign an addr_table_entry its index. All entries
5065 must be collected into the table when this function is called,
5066 because the indexing code relies on htab_traverse to traverse nodes
5067 in the same order for each run. */
5068
5069 int
5070 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5071 {
5072 addr_table_entry *node = *h;
5073
5074 /* Don't index unreferenced nodes. */
5075 if (node->refcount == 0)
5076 return 1;
5077
5078 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5079 node->index = *index;
5080 *index += 1;
5081
5082 return 1;
5083 }
5084
5085 /* Add an address constant attribute value to a DIE. When using
5086 dwarf_split_debug_info, address attributes in dies destined for the
5087 final executable should be direct references--setting the parameter
5088 force_direct ensures this behavior. */
5089
5090 static inline void
5091 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5092 bool force_direct)
5093 {
5094 dw_attr_node attr;
5095
5096 attr.dw_attr = attr_kind;
5097 attr.dw_attr_val.val_class = dw_val_class_addr;
5098 attr.dw_attr_val.v.val_addr = addr;
5099 if (dwarf_split_debug_info && !force_direct)
5100 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5101 else
5102 attr.dw_attr_val.val_entry = NULL;
5103 add_dwarf_attr (die, &attr);
5104 }
5105
5106 /* Get the RTX from an address DIE attribute. */
5107
5108 static inline rtx
5109 AT_addr (dw_attr_node *a)
5110 {
5111 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5112 return a->dw_attr_val.v.val_addr;
5113 }
5114
5115 /* Add a file attribute value to a DIE. */
5116
5117 static inline void
5118 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5119 struct dwarf_file_data *fd)
5120 {
5121 dw_attr_node attr;
5122
5123 attr.dw_attr = attr_kind;
5124 attr.dw_attr_val.val_class = dw_val_class_file;
5125 attr.dw_attr_val.val_entry = NULL;
5126 attr.dw_attr_val.v.val_file = fd;
5127 add_dwarf_attr (die, &attr);
5128 }
5129
5130 /* Get the dwarf_file_data from a file DIE attribute. */
5131
5132 static inline struct dwarf_file_data *
5133 AT_file (dw_attr_node *a)
5134 {
5135 gcc_assert (a && (AT_class (a) == dw_val_class_file
5136 || AT_class (a) == dw_val_class_file_implicit));
5137 return a->dw_attr_val.v.val_file;
5138 }
5139
5140 /* Add a vms delta attribute value to a DIE. */
5141
5142 static inline void
5143 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5144 const char *lbl1, const char *lbl2)
5145 {
5146 dw_attr_node attr;
5147
5148 attr.dw_attr = attr_kind;
5149 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5150 attr.dw_attr_val.val_entry = NULL;
5151 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5152 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5153 add_dwarf_attr (die, &attr);
5154 }
5155
5156 /* Add a symbolic view identifier attribute value to a DIE. */
5157
5158 static inline void
5159 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5160 const char *view_label)
5161 {
5162 dw_attr_node attr;
5163
5164 attr.dw_attr = attr_kind;
5165 attr.dw_attr_val.val_class = dw_val_class_symview;
5166 attr.dw_attr_val.val_entry = NULL;
5167 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5168 add_dwarf_attr (die, &attr);
5169 }
5170
5171 /* Add a label identifier attribute value to a DIE. */
5172
5173 static inline void
5174 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5175 const char *lbl_id)
5176 {
5177 dw_attr_node attr;
5178
5179 attr.dw_attr = attr_kind;
5180 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5181 attr.dw_attr_val.val_entry = NULL;
5182 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
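/* With split DWARF the label also gets an entry in the .debug_addr table so the attribute can refer to it by index. */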
5183 if (dwarf_split_debug_info)
5184 attr.dw_attr_val.val_entry
5185 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5186 ate_kind_label);
5187 add_dwarf_attr (die, &attr);
5188 }
5189
5190 /* Add a section offset attribute value to a DIE, an offset into the
5191 debug_line section. */
5192
5193 static inline void
5194 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5195 const char *label)
5196 {
5197 dw_attr_node attr;
5198
5199 attr.dw_attr = attr_kind;
5200 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5201 attr.dw_attr_val.val_entry = NULL;
5202 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5203 add_dwarf_attr (die, &attr);
5204 }
5205
5206 /* Add a section offset attribute value to a DIE, an offset into the
5207 debug_loclists section. */
5208
5209 static inline void
5210 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5211 const char *label)
5212 {
5213 dw_attr_node attr;
5214
5215 attr.dw_attr = attr_kind;
5216 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5217 attr.dw_attr_val.val_entry = NULL;
5218 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5219 add_dwarf_attr (die, &attr);
5220 }
5221
5222 /* Add a section offset attribute value to a DIE, an offset into the
5223 debug_macinfo section. */
5224
5225 static inline void
5226 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5227 const char *label)
5228 {
5229 dw_attr_node attr;
5230
5231 attr.dw_attr = attr_kind;
5232 attr.dw_attr_val.val_class = dw_val_class_macptr;
5233 attr.dw_attr_val.val_entry = NULL;
5234 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5235 add_dwarf_attr (die, &attr);
5236 }
5237
5238 /* Add an offset attribute value to a DIE. */
5239
5240 static inline void
5241 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5242 unsigned HOST_WIDE_INT offset)
5243 {
5244 dw_attr_node attr;
5245
5246 attr.dw_attr = attr_kind;
5247 attr.dw_attr_val.val_class = dw_val_class_offset;
5248 attr.dw_attr_val.val_entry = NULL;
5249 attr.dw_attr_val.v.val_offset = offset;
5250 add_dwarf_attr (die, &attr);
5251 }
5252
5253 /* Add a range_list attribute value to a DIE. When using
5254 dwarf_split_debug_info, address attributes in dies destined for the
5255 final executable should be direct references--setting the parameter
5256 force_direct ensures this behavior. */
5257
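/* Sentinel values stored in val_entry recording whether a range list offset follows split-debug-info (unrelocated) or normal (relocated) semantics; read back in output_range_list_offset. */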
5258 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5259 #define RELOCATED_OFFSET (NULL)
5260
5261 static void
5262 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5263 long unsigned int offset, bool force_direct)
5264 {
5265 dw_attr_node attr;
5266
5267 attr.dw_attr = attr_kind;
5268 attr.dw_attr_val.val_class = dw_val_class_range_list;
5269 /* For the range_list attribute, use val_entry to store whether the
5270 offset should follow split-debug-info or normal semantics. This
5271 value is read in output_range_list_offset. */
5272 if (dwarf_split_debug_info && !force_direct)
5273 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5274 else
5275 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5276 attr.dw_attr_val.v.val_offset = offset;
5277 add_dwarf_attr (die, &attr);
5278 }
5279
5280 /* Return the start label of a delta attribute. */
5281
5282 static inline const char *
5283 AT_vms_delta1 (dw_attr_node *a)
5284 {
5285 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5286 return a->dw_attr_val.v.val_vms_delta.lbl1;
5287 }
5288
5289 /* Return the end label of a delta attribute. */
5290
5291 static inline const char *
5292 AT_vms_delta2 (dw_attr_node *a)
5293 {
5294 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5295 return a->dw_attr_val.v.val_vms_delta.lbl2;
5296 }
5297
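/* Return the label string stored in a label-class attribute (lbl_id, lineptr, macptr, loclistsptr or high_pc). */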
5298 static inline const char *
5299 AT_lbl (dw_attr_node *a)
5300 {
5301 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5302 || AT_class (a) == dw_val_class_lineptr
5303 || AT_class (a) == dw_val_class_macptr
5304 || AT_class (a) == dw_val_class_loclistsptr
5305 || AT_class (a) == dw_val_class_high_pc));
5306 return a->dw_attr_val.v.val_lbl_id;
5307 }
5308
5309 /* Get the attribute of type attr_kind. */
5310
5311 static dw_attr_node *
5312 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5313 {
5314 dw_attr_node *a;
5315 unsigned ix;
5316 dw_die_ref spec = NULL;
5317
5318 if (! die)
5319 return NULL;
5320
5321 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5322 if (a->dw_attr == attr_kind)
5323 return a;
5324 else if (a->dw_attr == DW_AT_specification
5325 || a->dw_attr == DW_AT_abstract_origin)
5326 spec = AT_ref (a);
5327
5328 if (spec)
5329 return get_AT (spec, attr_kind);
5330
5331 return NULL;
5332 }
5333
5334 /* Returns the parent of the declaration of DIE. */
5335
5336 static dw_die_ref
5337 get_die_parent (dw_die_ref die)
5338 {
5339 dw_die_ref t;
5340
5341 if (!die)
5342 return NULL;
5343
5344 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5345 || (t = get_AT_ref (die, DW_AT_specification)))
5346 die = t;
5347
5348 return die->die_parent;
5349 }
5350
5351 /* Return the "low pc" attribute value, typically associated with a subprogram
5352 DIE. Return null if the "low pc" attribute is either not present, or if it
5353 cannot be represented as an assembler label identifier. */
5354
5355 static inline const char *
5356 get_AT_low_pc (dw_die_ref die)
5357 {
5358 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5359
5360 return a ? AT_lbl (a) : NULL;
5361 }
5362
5363 /* Return the "high pc" attribute value, typically associated with a subprogram
5364 DIE. Return null if the "high pc" attribute is either not present, or if it
5365 cannot be represented as an assembler label identifier. */
5366
5367 static inline const char *
5368 get_AT_hi_pc (dw_die_ref die)
5369 {
5370 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5371
5372 return a ? AT_lbl (a) : NULL;
5373 }
5374
5375 /* Return the value of the string attribute designated by ATTR_KIND, or
5376 NULL if it is not present. */
5377
5378 static inline const char *
5379 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5380 {
5381 dw_attr_node *a = get_AT (die, attr_kind);
5382
5383 return a ? AT_string (a) : NULL;
5384 }
5385
5386 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5387 if it is not present. */
5388
5389 static inline int
5390 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5391 {
5392 dw_attr_node *a = get_AT (die, attr_kind);
5393
5394 return a ? AT_flag (a) : 0;
5395 }
5396
5397 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5398 if it is not present. */
5399
5400 static inline unsigned
5401 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5402 {
5403 dw_attr_node *a = get_AT (die, attr_kind);
5404
5405 return a ? AT_unsigned (a) : 0;
5406 }
5407
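/* Return the DIE referenced by the attribute designated by ATTR_KIND, or NULL if it is not present. */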
5408 static inline dw_die_ref
5409 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5410 {
5411 dw_attr_node *a = get_AT (die, attr_kind);
5412
5413 return a ? AT_ref (a) : NULL;
5414 }
5415
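/* Return the dwarf_file_data of the file attribute designated by ATTR_KIND, or NULL if it is not present. */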
5416 static inline struct dwarf_file_data *
5417 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5418 {
5419 dw_attr_node *a = get_AT (die, attr_kind);
5420
5421 return a ? AT_file (a) : NULL;
5422 }
5423
5424 /* Return TRUE if the language is C++. */
5425
5426 static inline bool
5427 is_cxx (void)
5428 {
5429 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5430
5431 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5432 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5433 }
5434
5435 /* Return TRUE if DECL was created by the C++ frontend. */
5436
5437 static bool
5438 is_cxx (const_tree decl)
5439 {
5440 if (in_lto_p)
5441 {
5442 const_tree context = get_ultimate_context (decl);
5443 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5444 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5445 }
5446 return is_cxx ();
5447 }
5448
5449 /* Return TRUE if the language is Fortran. */
5450
5451 static inline bool
5452 is_fortran (void)
5453 {
5454 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5455
5456 return (lang == DW_LANG_Fortran77
5457 || lang == DW_LANG_Fortran90
5458 || lang == DW_LANG_Fortran95
5459 || lang == DW_LANG_Fortran03
5460 || lang == DW_LANG_Fortran08);
5461 }
5462
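/* Return TRUE if DECL was created by the Fortran frontend. */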
5463 static inline bool
5464 is_fortran (const_tree decl)
5465 {
5466 if (in_lto_p)
5467 {
5468 const_tree context = get_ultimate_context (decl);
5469 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5470 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5471 "GNU Fortran", 11) == 0
5472 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5473 "GNU F77") == 0);
5474 }
5475 return is_fortran ();
5476 }
5477
5478 /* Return TRUE if the language is Ada. */
5479
5480 static inline bool
5481 is_ada (void)
5482 {
5483 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5484
5485 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5486 }
5487
5488 /* Remove the specified attribute if present. Return TRUE if removal
5489 was successful. */
5490
5491 static bool
5492 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5493 {
5494 dw_attr_node *a;
5495 unsigned ix;
5496
5497 if (! die)
5498 return false;
5499
5500 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5501 if (a->dw_attr == attr_kind)
5502 {
5503 if (AT_class (a) == dw_val_class_str)
5504 if (a->dw_attr_val.v.val_str->refcount)
5505 a->dw_attr_val.v.val_str->refcount--;
5506
5507 /* vec::ordered_remove should help reduce the number of abbrevs
5508 that are needed. */
5509 die->die_attr->ordered_remove (ix);
5510 return true;
5511 }
5512 return false;
5513 }
5514
5515 /* Remove CHILD from its parent. PREV must have the property that
5516 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5517
5518 static void
5519 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5520 {
5521 gcc_assert (child->die_parent == prev->die_parent);
5522 gcc_assert (prev->die_sib == child);
5523 if (prev == child)
5524 {
5525 gcc_assert (child->die_parent->die_child == child);
5526 prev = NULL;
5527 }
5528 else
5529 prev->die_sib = child->die_sib;
5530 if (child->die_parent->die_child == child)
5531 child->die_parent->die_child = prev;
5532 child->die_sib = NULL;
5533 }
5534
5535 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5536 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5537
5538 static void
5539 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5540 {
5541 dw_die_ref parent = old_child->die_parent;
5542
5543 gcc_assert (parent == prev->die_parent);
5544 gcc_assert (prev->die_sib == old_child);
5545
5546 new_child->die_parent = parent;
5547 if (prev == old_child)
5548 {
5549 gcc_assert (parent->die_child == old_child);
5550 new_child->die_sib = new_child;
5551 }
5552 else
5553 {
5554 prev->die_sib = new_child;
5555 new_child->die_sib = old_child->die_sib;
5556 }
5557 if (old_child->die_parent->die_child == old_child)
5558 old_child->die_parent->die_child = new_child;
5559 old_child->die_sib = NULL;
5560 }
5561
5562 /* Move all children from OLD_PARENT to NEW_PARENT. */
5563
5564 static void
5565 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5566 {
5567 dw_die_ref c;
5568 new_parent->die_child = old_parent->die_child;
5569 old_parent->die_child = NULL;
5570 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5571 }
5572
5573 /* Remove any child DIEs whose die_tag is TAG. Do nothing if no child
5574 matches TAG. */
5575
5576 static void
5577 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5578 {
5579 dw_die_ref c;
5580
5581 c = die->die_child;
5582 if (c) do {
5583 dw_die_ref prev = c;
5584 c = c->die_sib;
5585 while (c->die_tag == tag)
5586 {
5587 remove_child_with_prev (c, prev);
5588 c->die_parent = NULL;
5589 /* Might have removed every child. */
5590 if (die->die_child == NULL)
5591 return;
5592 c = prev->die_sib;
5593 }
5594 } while (c != die->die_child);
5595 }
5596
5597 /* Add a CHILD_DIE as the last child of DIE. */
5598
5599 static void
5600 add_child_die (dw_die_ref die, dw_die_ref child_die)
5601 {
5602 /* FIXME this should probably be an assert. */
5603 if (! die || ! child_die)
5604 return;
5605 gcc_assert (die != child_die);
5606
5607 child_die->die_parent = die;
5608 if (die->die_child)
5609 {
5610 child_die->die_sib = die->die_child->die_sib;
5611 die->die_child->die_sib = child_die;
5612 }
5613 else
5614 child_die->die_sib = child_die;
5615 die->die_child = child_die;
5616 }
5617
5618 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5619
5620 static void
5621 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5622 dw_die_ref after_die)
5623 {
5624 gcc_assert (die
5625 && child_die
5626 && after_die
5627 && die->die_child
5628 && die != child_die);
5629
5630 child_die->die_parent = die;
5631 child_die->die_sib = after_die->die_sib;
5632 after_die->die_sib = child_die;
5633 if (die->die_child == after_die)
5634 die->die_child = child_die;
5635 }
5636
5637 /* Unassociate CHILD from its parent, and make its parent be
5638 NEW_PARENT. */
5639
5640 static void
5641 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5642 {
5643 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5644 if (p->die_sib == child)
5645 {
5646 remove_child_with_prev (child, p);
5647 break;
5648 }
5649 add_child_die (new_parent, child);
5650 }
5651
5652 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5653 is the specification, to the end of PARENT's list of children.
5654 This is done by removing and re-adding it. */
5655
5656 static void
5657 splice_child_die (dw_die_ref parent, dw_die_ref child)
5658 {
5659 /* We want the declaration DIE from inside the class, not the
5660 specification DIE at toplevel. */
5661 if (child->die_parent != parent)
5662 {
5663 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5664
5665 if (tmp)
5666 child = tmp;
5667 }
5668
5669 gcc_assert (child->die_parent == parent
5670 || (child->die_parent
5671 == get_AT_ref (parent, DW_AT_specification)));
5672
5673 reparent_child (child, parent);
5674 }
5675
5676 /* Create and return a new die with TAG_VALUE as tag. */
5677
5678 static inline dw_die_ref
5679 new_die_raw (enum dwarf_tag tag_value)
5680 {
5681 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5682 die->die_tag = tag_value;
5683 return die;
5684 }
5685
5686 /* Create and return a new die with a parent of PARENT_DIE. If
5687 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5688 associated tree T must be supplied to determine parenthood
5689 later. */
5690
5691 static inline dw_die_ref
5692 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5693 {
5694 dw_die_ref die = new_die_raw (tag_value);
5695
5696 if (parent_die != NULL)
5697 add_child_die (parent_die, die);
5698 else
5699 {
5700 limbo_die_node *limbo_node;
5701
5702 /* No DIEs created after early dwarf should end up in limbo,
5703 because the limbo list should not persist past LTO
5704 streaming. */
5705 if (tag_value != DW_TAG_compile_unit
5706 /* These are allowed because they're generated while
5707 breaking out COMDAT units late. */
5708 && tag_value != DW_TAG_type_unit
5709 && tag_value != DW_TAG_skeleton_unit
5710 && !early_dwarf
5711 /* Allow nested functions to live in limbo because they will
5712 only temporarily live there, as decls_for_scope will fix
5713 them up. */
5714 && (TREE_CODE (t) != FUNCTION_DECL
5715 || !decl_function_context (t))
5716 /* Same as nested functions above but for types. Types that
5717 are local to a function will be fixed in
5718 decls_for_scope. */
5719 && (!RECORD_OR_UNION_TYPE_P (t)
5720 || !TYPE_CONTEXT (t)
5721 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5722 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5723 especially in the ltrans stage, but once we implement LTO
5724 dwarf streaming, we should remove this exception. */
5725 && !in_lto_p)
5726 {
5727 fprintf (stderr, "symbol ended up in limbo too late:");
5728 debug_generic_stmt (t);
5729 gcc_unreachable ();
5730 }
5731
5732 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5733 limbo_node->die = die;
5734 limbo_node->created_for = t;
5735 limbo_node->next = limbo_die_list;
5736 limbo_die_list = limbo_node;
5737 }
5738
5739 return die;
5740 }
5741
5742 /* Return the DIE associated with the given type specifier. */
5743
5744 static inline dw_die_ref
5745 lookup_type_die (tree type)
5746 {
5747 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5748 if (die && die->removed)
5749 {
5750 TYPE_SYMTAB_DIE (type) = NULL;
5751 return NULL;
5752 }
5753 return die;
5754 }
5755
5756 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5757 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5758 anonymous type instead of the one of the naming typedef. */
5759
5760 static inline dw_die_ref
5761 strip_naming_typedef (tree type, dw_die_ref type_die)
5762 {
5763 if (type
5764 && TREE_CODE (type) == RECORD_TYPE
5765 && type_die
5766 && type_die->die_tag == DW_TAG_typedef
5767 && is_naming_typedef_decl (TYPE_NAME (type)))
5768 type_die = get_AT_ref (type_die, DW_AT_type);
5769 return type_die;
5770 }
5771
5772 /* Like lookup_type_die, but if type is an anonymous type named by a
5773 typedef[1], return the DIE of the anonymous type instead of the one of
5774 the naming typedef. This is because in gen_typedef_die, we did
5775 equate the anonymous struct named by the typedef with the DIE of
5776 the naming typedef. So by default, lookup_type_die on an anonymous
5777 struct yields the DIE of the naming typedef.
5778
5779 [1]: Read the comment of is_naming_typedef_decl to learn about what
5780 a naming typedef is. */
5781
5782 static inline dw_die_ref
5783 lookup_type_die_strip_naming_typedef (tree type)
5784 {
5785 dw_die_ref die = lookup_type_die (type);
5786 return strip_naming_typedef (type, die);
5787 }
5788
5789 /* Equate a DIE to a given type specifier. */
5790
5791 static inline void
5792 equate_type_number_to_die (tree type, dw_die_ref type_die)
5793 {
5794 TYPE_SYMTAB_DIE (type) = type_die;
5795 }
5796
5797 /* Returns a hash value for X (which really is a die_struct). */
5798
5799 inline hashval_t
5800 decl_die_hasher::hash (die_node *x)
5801 {
5802 return (hashval_t) x->decl_id;
5803 }
5804
5805 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5806
5807 inline bool
5808 decl_die_hasher::equal (die_node *x, tree y)
5809 {
5810 return (x->decl_id == DECL_UID (y));
5811 }
5812
5813 /* Return the DIE associated with a given declaration. */
5814
5815 static inline dw_die_ref
5816 lookup_decl_die (tree decl)
5817 {
5818 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5819 NO_INSERT);
5820 if (!die)
5821 return NULL;
5822 if ((*die)->removed)
5823 {
5824 decl_die_table->clear_slot (die);
5825 return NULL;
5826 }
5827 return *die;
5828 }
5829
5830
5831 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5832 style reference. Return true if we found one referring to a DIE for
5833 DECL, otherwise return false. */
5834
5835 static bool
5836 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5837 unsigned HOST_WIDE_INT *off)
5838 {
5839 dw_die_ref die;
5840
5841 if (flag_wpa && !decl_die_table)
5842 return false;
5843
5844 if (TREE_CODE (decl) == BLOCK)
5845 die = BLOCK_DIE (decl);
5846 else
5847 die = lookup_decl_die (decl);
5848 if (!die)
5849 return false;
5850
5851 /* During WPA stage we currently use DIEs to store the
5852 decl <-> label + offset map. That's quite inefficient but it
5853 works for now. */
5854 if (flag_wpa)
5855 {
5856 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5857 if (!ref)
5858 {
5859 gcc_assert (die == comp_unit_die ());
5860 return false;
5861 }
5862 *off = ref->die_offset;
5863 *sym = ref->die_id.die_symbol;
5864 return true;
5865 }
5866
5867 /* Similar to get_ref_die_offset_label, but using the "correct"
5868 label. */
5869 *off = die->die_offset;
5870 while (die->die_parent)
5871 die = die->die_parent;
5872 /* For the containing CU DIE we compute a die_symbol in
5873 compute_comp_unit_symbol. */
5874 gcc_assert (die->die_tag == DW_TAG_compile_unit
5875 && die->die_id.die_symbol != NULL);
5876 *sym = die->die_id.die_symbol;
5877 return true;
5878 }
5879
5880 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5881
5882 static void
5883 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5884 const char *symbol, HOST_WIDE_INT offset)
5885 {
5886 /* Create a fake DIE that contains the reference. Don't use
5887 new_die because we don't want to end up in the limbo list. */
5888 dw_die_ref ref = new_die_raw (die->die_tag);
5889 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5890 ref->die_offset = offset;
5891 ref->with_offset = 1;
5892 add_AT_die_ref (die, attr_kind, ref);
5893 }
5894
5895 /* Create a DIE for DECL if required and add a reference to a DIE
5896 at SYMBOL + OFFSET which contains attributes dumped early. */
5897
5898 static void
5899 dwarf2out_register_external_die (tree decl, const char *sym,
5900 unsigned HOST_WIDE_INT off)
5901 {
5902 if (debug_info_level == DINFO_LEVEL_NONE)
5903 return;
5904
5905 if (flag_wpa && !decl_die_table)
5906 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5907
5908 dw_die_ref die
5909 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5910 gcc_assert (!die);
5911
5912 tree ctx;
5913 dw_die_ref parent = NULL;
5914 /* Need to look up a DIE for the decl's context - the containing
5915 function or translation unit. */
5916 if (TREE_CODE (decl) == BLOCK)
5917 {
5918 ctx = BLOCK_SUPERCONTEXT (decl);
5919 /* ??? We do not output DIEs for all scopes thus skip as
5920 many DIEs as needed. */
5921 while (TREE_CODE (ctx) == BLOCK
5922 && !BLOCK_DIE (ctx))
5923 ctx = BLOCK_SUPERCONTEXT (ctx);
5924 }
5925 else
5926 ctx = DECL_CONTEXT (decl);
5927 /* Peel types in the context stack. */
5928 while (ctx && TYPE_P (ctx))
5929 ctx = TYPE_CONTEXT (ctx);
5930 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5931 if (debug_info_level <= DINFO_LEVEL_TERSE)
5932 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5933 ctx = DECL_CONTEXT (ctx);
5934 if (ctx)
5935 {
5936 if (TREE_CODE (ctx) == BLOCK)
5937 parent = BLOCK_DIE (ctx);
5938 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5939 /* Keep the 1:1 association during WPA. */
5940 && !flag_wpa)
5941 /* Otherwise all late annotations go to the main CU which
5942 imports the original CUs. */
5943 parent = comp_unit_die ();
5944 else if (TREE_CODE (ctx) == FUNCTION_DECL
5945 && TREE_CODE (decl) != PARM_DECL
5946 && TREE_CODE (decl) != BLOCK)
5947 /* Leave function local entities parent determination to when
5948 we process scope vars. */
5949 ;
5950 else
5951 parent = lookup_decl_die (ctx);
5952 }
5953 else
5954 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5955 Handle this case gracefully by globalizing stuff. */
5956 parent = comp_unit_die ();
5957 /* Create a DIE "stub". */
5958 switch (TREE_CODE (decl))
5959 {
5960 case TRANSLATION_UNIT_DECL:
5961 if (! flag_wpa)
5962 {
5963 die = comp_unit_die ();
5964 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5965 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5966 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5967 to create a DIE for the original CUs. */
5968 return;
5969 }
5970 /* Keep the 1:1 association during WPA. */
5971 die = new_die (DW_TAG_compile_unit, NULL, decl);
5972 break;
5973 case NAMESPACE_DECL:
5974 if (is_fortran (decl))
5975 die = new_die (DW_TAG_module, parent, decl);
5976 else
5977 die = new_die (DW_TAG_namespace, parent, decl);
5978 break;
5979 case FUNCTION_DECL:
5980 die = new_die (DW_TAG_subprogram, parent, decl);
5981 break;
5982 case VAR_DECL:
5983 die = new_die (DW_TAG_variable, parent, decl);
5984 break;
5985 case RESULT_DECL:
5986 die = new_die (DW_TAG_variable, parent, decl);
5987 break;
5988 case PARM_DECL:
5989 die = new_die (DW_TAG_formal_parameter, parent, decl);
5990 break;
5991 case CONST_DECL:
5992 die = new_die (DW_TAG_constant, parent, decl);
5993 break;
5994 case LABEL_DECL:
5995 die = new_die (DW_TAG_label, parent, decl);
5996 break;
5997 case BLOCK:
5998 die = new_die (DW_TAG_lexical_block, parent, decl);
5999 break;
6000 default:
6001 gcc_unreachable ();
6002 }
6003 if (TREE_CODE (decl) == BLOCK)
6004 BLOCK_DIE (decl) = die;
6005 else
6006 equate_decl_number_to_die (decl, die);
6007
6008 /* Add a reference to the DIE providing early debug at $sym + off. */
6009 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6010 }
6011
6012 /* Returns a hash value for X (which really is a var_loc_list). */
6013
6014 inline hashval_t
6015 decl_loc_hasher::hash (var_loc_list *x)
6016 {
6017 return (hashval_t) x->decl_id;
6018 }
6019
6020 /* Return nonzero if decl_id of var_loc_list X is the same as
6021 UID of decl *Y. */
6022
6023 inline bool
6024 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6025 {
6026 return (x->decl_id == DECL_UID (y));
6027 }
6028
6029 /* Return the var_loc list associated with a given declaration. */
6030
6031 static inline var_loc_list *
6032 lookup_decl_loc (const_tree decl)
6033 {
6034 if (!decl_loc_table)
6035 return NULL;
6036 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6037 }
6038
6039 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6040
6041 inline hashval_t
6042 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6043 {
6044 return (hashval_t) x->decl_id;
6045 }
6046
6047 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6048 UID of decl *Y. */
6049
6050 inline bool
6051 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6052 {
6053 return (x->decl_id == DECL_UID (y));
6054 }
6055
6056 /* Equate a DIE to a particular declaration. */
6057
6058 static void
6059 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6060 {
6061 unsigned int decl_id = DECL_UID (decl);
6062
6063 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6064 decl_die->decl_id = decl_id;
6065 }
6066
6067 /* Return how many bits the PIECE EXPR_LIST covers. */
6068
6069 static HOST_WIDE_INT
6070 decl_piece_bitsize (rtx piece)
6071 {
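/* Small bitsizes are encoded directly as the EXPR_LIST mode; larger ones are stored as a CONST_INT inside a CONCAT (see decl_piece_node). */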
6072 int ret = (int) GET_MODE (piece);
6073 if (ret)
6074 return ret;
6075 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6076 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6077 return INTVAL (XEXP (XEXP (piece, 0), 0));
6078 }
6079
6080 /* Return pointer to the location of location note in PIECE EXPR_LIST. */
6081
6082 static rtx *
6083 decl_piece_varloc_ptr (rtx piece)
6084 {
6085 if ((int) GET_MODE (piece))
6086 return &XEXP (piece, 0);
6087 else
6088 return &XEXP (XEXP (piece, 0), 1);
6089 }
6090
6091 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6092 Next is the chain of following piece nodes. */
6093
6094 static rtx_expr_list *
6095 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6096 {
6097 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6098 return alloc_EXPR_LIST (bitsize, loc_note, next);
6099 else
6100 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6101 GEN_INT (bitsize),
6102 loc_note), next);
6103 }
6104
6105 /* Return rtx that should be stored into loc field for
6106 LOC_NOTE and BITPOS/BITSIZE. */
6107
6108 static rtx
6109 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6110 HOST_WIDE_INT bitsize)
6111 {
6112 if (bitsize != -1)
6113 {
6114 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6115 if (bitpos != 0)
6116 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6117 }
6118 return loc_note;
6119 }
6120
6121 /* This function either modifies the location piece list *DEST in
6122 place (if SRC and INNER are NULL), or copies the location piece list
6123 *SRC to *DEST while modifying it. Location BITPOS is modified
6124 to contain LOC_NOTE; any pieces overlapping it are removed (or
6125 not copied) and, if needed, some padding around it is added.
6126 When modifying in place, DEST should point to the EXPR_LIST where
6127 earlier pieces cover PIECE_BITPOS bits; when copying, SRC points
6128 to the start of the whole list and INNER points to the EXPR_LIST
6129 where earlier pieces cover PIECE_BITPOS bits. */
6130
6131 static void
6132 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6133 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6134 HOST_WIDE_INT bitsize, rtx loc_note)
6135 {
6136 HOST_WIDE_INT diff;
6137 bool copy = inner != NULL;
6138
6139 if (copy)
6140 {
6141 /* First copy all nodes preceding the current bitpos. */
6142 while (src != inner)
6143 {
6144 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6145 decl_piece_bitsize (*src), NULL_RTX);
6146 dest = &XEXP (*dest, 1);
6147 src = &XEXP (*src, 1);
6148 }
6149 }
6150 /* Add padding if needed. */
6151 if (bitpos != piece_bitpos)
6152 {
6153 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6154 copy ? NULL_RTX : *dest);
6155 dest = &XEXP (*dest, 1);
6156 }
6157 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6158 {
6159 gcc_assert (!copy);
6160 /* A piece with the correct bitpos and bitsize already exists;
6161 just update the location for it and return. */
6162 *decl_piece_varloc_ptr (*dest) = loc_note;
6163 return;
6164 }
6165 /* Add the piece that changed. */
6166 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6167 dest = &XEXP (*dest, 1);
6168 /* Skip over pieces that overlap it. */
6169 diff = bitpos - piece_bitpos + bitsize;
6170 if (!copy)
6171 src = dest;
6172 while (diff > 0 && *src)
6173 {
6174 rtx piece = *src;
6175 diff -= decl_piece_bitsize (piece);
6176 if (copy)
6177 src = &XEXP (piece, 1);
6178 else
6179 {
6180 *src = XEXP (piece, 1);
6181 free_EXPR_LIST_node (piece);
6182 }
6183 }
6184 /* Add padding if needed. */
6185 if (diff < 0 && *src)
6186 {
6187 if (!copy)
6188 dest = src;
6189 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6190 dest = &XEXP (*dest, 1);
6191 }
6192 if (!copy)
6193 return;
6194 /* Finally copy all nodes following it. */
6195 while (*src)
6196 {
6197 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6198 decl_piece_bitsize (*src), NULL_RTX);
6199 dest = &XEXP (*dest, 1);
6200 src = &XEXP (*src, 1);
6201 }
6202 }
6203
6204 /* Add a variable location node to the linked list for DECL. */
6205
6206 static struct var_loc_node *
6207 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6208 {
6209 unsigned int decl_id;
6210 var_loc_list *temp;
6211 struct var_loc_node *loc = NULL;
6212 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6213
6214 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6215 {
6216 tree realdecl = DECL_DEBUG_EXPR (decl);
6217 if (handled_component_p (realdecl)
6218 || (TREE_CODE (realdecl) == MEM_REF
6219 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6220 {
6221 bool reverse;
6222 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6223 &bitsize, &reverse);
6224 if (!innerdecl
6225 || !DECL_P (innerdecl)
6226 || DECL_IGNORED_P (innerdecl)
6227 || TREE_STATIC (innerdecl)
6228 || bitsize == 0
6229 || bitpos + bitsize > 256)
6230 return NULL;
6231 decl = innerdecl;
6232 }
6233 }
6234
6235 decl_id = DECL_UID (decl);
6236 var_loc_list **slot
6237 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6238 if (*slot == NULL)
6239 {
6240 temp = ggc_cleared_alloc<var_loc_list> ();
6241 temp->decl_id = decl_id;
6242 *slot = temp;
6243 }
6244 else
6245 temp = *slot;
6246
6247 /* For PARM_DECLs try to keep around the original incoming value,
6248 even if that means we'll emit a zero-range .debug_loc entry. */
6249 if (temp->last
6250 && temp->first == temp->last
6251 && TREE_CODE (decl) == PARM_DECL
6252 && NOTE_P (temp->first->loc)
6253 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6254 && DECL_INCOMING_RTL (decl)
6255 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6256 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6257 == GET_CODE (DECL_INCOMING_RTL (decl))
6258 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6259 && (bitsize != -1
6260 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6261 NOTE_VAR_LOCATION_LOC (loc_note))
6262 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6263 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6264 {
6265 loc = ggc_cleared_alloc<var_loc_node> ();
6266 temp->first->next = loc;
6267 temp->last = loc;
6268 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6269 }
6270 else if (temp->last)
6271 {
6272 struct var_loc_node *last = temp->last, *unused = NULL;
6273 rtx *piece_loc = NULL, last_loc_note;
6274 HOST_WIDE_INT piece_bitpos = 0;
6275 if (last->next)
6276 {
6277 last = last->next;
6278 gcc_assert (last->next == NULL);
6279 }
6280 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6281 {
6282 piece_loc = &last->loc;
6283 do
6284 {
6285 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6286 if (piece_bitpos + cur_bitsize > bitpos)
6287 break;
6288 piece_bitpos += cur_bitsize;
6289 piece_loc = &XEXP (*piece_loc, 1);
6290 }
6291 while (*piece_loc);
6292 }
6293 /* TEMP->LAST here is a pointer either to the last-but-one or to the
6294 last element in the chained list; LAST is a pointer to the
6295 last element. */
6296 if (label && strcmp (last->label, label) == 0 && last->view == view)
6297 {
6298 /* For SRA-optimized variables, if there weren't any real
6299 insns since the last note, just modify the last node. */
6300 if (piece_loc != NULL)
6301 {
6302 adjust_piece_list (piece_loc, NULL, NULL,
6303 bitpos, piece_bitpos, bitsize, loc_note);
6304 return NULL;
6305 }
6306 /* If the last note doesn't cover any instructions, remove it. */
6307 if (temp->last != last)
6308 {
6309 temp->last->next = NULL;
6310 unused = last;
6311 last = temp->last;
6312 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6313 }
6314 else
6315 {
6316 gcc_assert (temp->first == temp->last
6317 || (temp->first->next == temp->last
6318 && TREE_CODE (decl) == PARM_DECL));
6319 memset (temp->last, '\0', sizeof (*temp->last));
6320 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6321 return temp->last;
6322 }
6323 }
6324 if (bitsize == -1 && NOTE_P (last->loc))
6325 last_loc_note = last->loc;
6326 else if (piece_loc != NULL
6327 && *piece_loc != NULL_RTX
6328 && piece_bitpos == bitpos
6329 && decl_piece_bitsize (*piece_loc) == bitsize)
6330 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6331 else
6332 last_loc_note = NULL_RTX;
6333 /* If the current location is the same as the end of the list,
6334 and either both or neither of the locations is uninitialized,
6335 we have nothing to do. */
6336 if (last_loc_note == NULL_RTX
6337 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6338 NOTE_VAR_LOCATION_LOC (loc_note)))
6339 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6340 != NOTE_VAR_LOCATION_STATUS (loc_note))
6341 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6342 == VAR_INIT_STATUS_UNINITIALIZED)
6343 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6344 == VAR_INIT_STATUS_UNINITIALIZED))))
6345 {
6346 /* Add LOC to the end of list and update LAST. If the last
6347 element of the list has been removed above, reuse its
6348 memory for the new node, otherwise allocate a new one. */
6349 if (unused)
6350 {
6351 loc = unused;
6352 memset (loc, '\0', sizeof (*loc));
6353 }
6354 else
6355 loc = ggc_cleared_alloc<var_loc_node> ();
6356 if (bitsize == -1 || piece_loc == NULL)
6357 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6358 else
6359 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6360 bitpos, piece_bitpos, bitsize, loc_note);
6361 last->next = loc;
6362 /* Ensure TEMP->LAST will point either to the new last but one
6363 element of the chain, or to the last element in it. */
6364 if (last != temp->last)
6365 temp->last = last;
6366 }
6367 else if (unused)
6368 ggc_free (unused);
6369 }
6370 else
6371 {
6372 loc = ggc_cleared_alloc<var_loc_node> ();
6373 temp->first = loc;
6374 temp->last = loc;
6375 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6376 }
6377 return loc;
6378 }
6379 \f
6380 /* Keep track of the number of spaces used to indent the
6381 output of the debugging routines that print the structure of
6382 the DIE internal representation. */
6383 static int print_indent;
6384
6385 /* Indent the line the number of spaces given by print_indent. */
6386
6387 static inline void
6388 print_spaces (FILE *outfile)
6389 {
6390 fprintf (outfile, "%*s", print_indent, "");
6391 }
6392
6393 /* Print a type signature in hex. */
6394
6395 static inline void
6396 print_signature (FILE *outfile, char *sig)
6397 {
6398 int i;
6399
6400 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6401 fprintf (outfile, "%02x", sig[i] & 0xff);
6402 }
6403
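/* Print the discriminant value DISCR_VALUE to OUTFILE, as unsigned if it is positive and as signed otherwise. */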
6404 static inline void
6405 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6406 {
6407 if (discr_value->pos)
6408 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6409 else
6410 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6411 }
6412
6413 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6414
6415 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6416 RECURSE, output location descriptor operations. */
6417
6418 static void
6419 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6420 {
6421 switch (val->val_class)
6422 {
6423 case dw_val_class_addr:
6424 fprintf (outfile, "address");
6425 break;
6426 case dw_val_class_offset:
6427 fprintf (outfile, "offset");
6428 break;
6429 case dw_val_class_loc:
6430 fprintf (outfile, "location descriptor");
6431 if (val->v.val_loc == NULL)
6432 fprintf (outfile, " -> <null>\n");
6433 else if (recurse)
6434 {
6435 fprintf (outfile, ":\n");
6436 print_indent += 4;
6437 print_loc_descr (val->v.val_loc, outfile);
6438 print_indent -= 4;
6439 }
6440 else
6441 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6442 break;
6443 case dw_val_class_loc_list:
6444 fprintf (outfile, "location list -> label:%s",
6445 val->v.val_loc_list->ll_symbol);
6446 break;
6447 case dw_val_class_view_list:
6448 val = view_list_to_loc_list_val_node (val);
6449 fprintf (outfile, "location list with views -> labels:%s and %s",
6450 val->v.val_loc_list->ll_symbol,
6451 val->v.val_loc_list->vl_symbol);
6452 break;
6453 case dw_val_class_range_list:
6454 fprintf (outfile, "range list");
6455 break;
6456 case dw_val_class_const:
6457 case dw_val_class_const_implicit:
6458 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6459 break;
6460 case dw_val_class_unsigned_const:
6461 case dw_val_class_unsigned_const_implicit:
6462 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6463 break;
6464 case dw_val_class_const_double:
6465 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6466 HOST_WIDE_INT_PRINT_UNSIGNED")",
6467 val->v.val_double.high,
6468 val->v.val_double.low);
6469 break;
6470 case dw_val_class_wide_int:
6471 {
6472 int i = val->v.val_wide->get_len ();
6473 fprintf (outfile, "constant (");
6474 gcc_assert (i > 0);
6475 if (val->v.val_wide->elt (i - 1) == 0)
6476 fprintf (outfile, "0x");
6477 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6478 val->v.val_wide->elt (--i));
6479 while (--i >= 0)
6480 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6481 val->v.val_wide->elt (i));
6482 fprintf (outfile, ")");
6483 break;
6484 }
6485 case dw_val_class_vec:
6486 fprintf (outfile, "floating-point or vector constant");
6487 break;
6488 case dw_val_class_flag:
6489 fprintf (outfile, "%u", val->v.val_flag);
6490 break;
6491 case dw_val_class_die_ref:
6492 if (val->v.val_die_ref.die != NULL)
6493 {
6494 dw_die_ref die = val->v.val_die_ref.die;
6495
6496 if (die->comdat_type_p)
6497 {
6498 fprintf (outfile, "die -> signature: ");
6499 print_signature (outfile,
6500 die->die_id.die_type_node->signature);
6501 }
6502 else if (die->die_id.die_symbol)
6503 {
6504 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6505 if (die->with_offset)
6506 fprintf (outfile, " + %ld", die->die_offset);
6507 }
6508 else
6509 fprintf (outfile, "die -> %ld", die->die_offset);
6510 fprintf (outfile, " (%p)", (void *) die);
6511 }
6512 else
6513 fprintf (outfile, "die -> <null>");
6514 break;
6515 case dw_val_class_vms_delta:
6516 fprintf (outfile, "delta: @slotcount(%s-%s)",
6517 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6518 break;
6519 case dw_val_class_symview:
6520 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6521 break;
6522 case dw_val_class_lbl_id:
6523 case dw_val_class_lineptr:
6524 case dw_val_class_macptr:
6525 case dw_val_class_loclistsptr:
6526 case dw_val_class_high_pc:
6527 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6528 break;
6529 case dw_val_class_str:
6530 if (val->v.val_str->str != NULL)
6531 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6532 else
6533 fprintf (outfile, "<null>");
6534 break;
6535 case dw_val_class_file:
6536 case dw_val_class_file_implicit:
6537 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6538 val->v.val_file->emitted_number);
6539 break;
6540 case dw_val_class_data8:
6541 {
6542 int i;
6543
6544 for (i = 0; i < 8; i++)
6545 fprintf (outfile, "%02x", val->v.val_data8[i]);
6546 break;
6547 }
6548 case dw_val_class_discr_value:
6549 print_discr_value (outfile, &val->v.val_discr_value);
6550 break;
6551 case dw_val_class_discr_list:
6552 for (dw_discr_list_ref node = val->v.val_discr_list;
6553 node != NULL;
6554 node = node->dw_discr_next)
6555 {
6556 if (node->dw_discr_range)
6557 {
6558 fprintf (outfile, " .. ");
6559 print_discr_value (outfile, &node->dw_discr_lower_bound);
6560 print_discr_value (outfile, &node->dw_discr_upper_bound);
6561 }
6562 else
6563 print_discr_value (outfile, &node->dw_discr_lower_bound);
6564
6565 if (node->dw_discr_next != NULL)
6566 fprintf (outfile, " | ");
6567 }
break;
6568 default:
6569 break;
6570 }
6571 }
6572
6573 /* Likewise, for a DIE attribute. */
6574
6575 static void
6576 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6577 {
6578 print_dw_val (&a->dw_attr_val, recurse, outfile);
6579 }
6580
6581
6582 /* Print the list of operands in the LOC location description to OUTFILE. This
6583 routine is a debugging aid only. */
6584
6585 static void
6586 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6587 {
6588 dw_loc_descr_ref l = loc;
6589
6590 if (loc == NULL)
6591 {
6592 print_spaces (outfile);
6593 fprintf (outfile, "<null>\n");
6594 return;
6595 }
6596
6597 for (l = loc; l != NULL; l = l->dw_loc_next)
6598 {
6599 print_spaces (outfile);
6600 fprintf (outfile, "(%p) %s",
6601 (void *) l,
6602 dwarf_stack_op_name (l->dw_loc_opc));
6603 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6604 {
6605 fprintf (outfile, " ");
6606 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6607 }
6608 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6609 {
6610 fprintf (outfile, ", ");
6611 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6612 }
6613 fprintf (outfile, "\n");
6614 }
6615 }
6616
6617 /* Print the information associated with a given DIE, and its children.
6618 This routine is a debugging aid only. */
6619
6620 static void
6621 print_die (dw_die_ref die, FILE *outfile)
6622 {
6623 dw_attr_node *a;
6624 dw_die_ref c;
6625 unsigned ix;
6626
6627 print_spaces (outfile);
6628 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6629 die->die_offset, dwarf_tag_name (die->die_tag),
6630 (void*) die);
6631 print_spaces (outfile);
6632 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6633 fprintf (outfile, " offset: %ld", die->die_offset);
6634 fprintf (outfile, " mark: %d\n", die->die_mark);
6635
6636 if (die->comdat_type_p)
6637 {
6638 print_spaces (outfile);
6639 fprintf (outfile, " signature: ");
6640 print_signature (outfile, die->die_id.die_type_node->signature);
6641 fprintf (outfile, "\n");
6642 }
6643
6644 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6645 {
6646 print_spaces (outfile);
6647 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6648
6649 print_attribute (a, true, outfile);
6650 fprintf (outfile, "\n");
6651 }
6652
6653 if (die->die_child != NULL)
6654 {
6655 print_indent += 4;
6656 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6657 print_indent -= 4;
6658 }
6659 if (print_indent == 0)
6660 fprintf (outfile, "\n");
6661 }
6662
6663 /* Print the list of operations in the LOC location description. */
6664
6665 DEBUG_FUNCTION void
6666 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6667 {
6668 print_loc_descr (loc, stderr);
6669 }
6670
6671 /* Print the information collected for a given DIE. */
6672
6673 DEBUG_FUNCTION void
6674 debug_dwarf_die (dw_die_ref die)
6675 {
6676 print_die (die, stderr);
6677 }
6678
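/* Overloads of debug () for die_struct, convenient to call from a debugger. */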
6679 DEBUG_FUNCTION void
6680 debug (die_struct &ref)
6681 {
6682 print_die (&ref, stderr);
6683 }
6684
6685 DEBUG_FUNCTION void
6686 debug (die_struct *ptr)
6687 {
6688 if (ptr)
6689 debug (*ptr);
6690 else
6691 fprintf (stderr, "<nil>\n");
6692 }
6693
6694
6695 /* Print all DWARF information collected for the compilation unit.
6696 This routine is a debugging aid only. */
6697
6698 DEBUG_FUNCTION void
6699 debug_dwarf (void)
6700 {
6701 print_indent = 0;
6702 print_die (comp_unit_die (), stderr);
6703 }
6704
6705 /* Verify the DIE tree structure. */
6706
6707 DEBUG_FUNCTION void
6708 verify_die (dw_die_ref die)
6709 {
6710 gcc_assert (!die->die_mark);
6711 if (die->die_parent == NULL
6712 && die->die_sib == NULL)
6713 return;
6714 /* Verify the die_sib list is cyclic. */
6715 dw_die_ref x = die;
6716 do
6717 {
6718 x->die_mark = 1;
6719 x = x->die_sib;
6720 }
6721 while (x && !x->die_mark);
6722 gcc_assert (x == die);
6723 x = die;
6724 do
6725 {
6726 /* Verify all dies have the same parent. */
6727 gcc_assert (x->die_parent == die->die_parent);
6728 if (x->die_child)
6729 {
6730 /* Verify the child has the proper parent and recurse. */
6731 gcc_assert (x->die_child->die_parent == x);
6732 verify_die (x->die_child);
6733 }
6734 x->die_mark = 0;
6735 x = x->die_sib;
6736 }
6737 while (x && x->die_mark);
6738 }
6739
6740 /* Sanity checks on DIEs. */
6741
6742 static void
6743 check_die (dw_die_ref die)
6744 {
6745 unsigned ix;
6746 dw_attr_node *a;
6747 bool inline_found = false;
6748 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6749 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6750 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6751 {
6752 switch (a->dw_attr)
6753 {
6754 case DW_AT_inline:
6755 if (a->dw_attr_val.v.val_unsigned)
6756 inline_found = true;
6757 break;
6758 case DW_AT_location:
6759 ++n_location;
6760 break;
6761 case DW_AT_low_pc:
6762 ++n_low_pc;
6763 break;
6764 case DW_AT_high_pc:
6765 ++n_high_pc;
6766 break;
6767 case DW_AT_artificial:
6768 ++n_artificial;
6769 break;
6770 case DW_AT_decl_column:
6771 ++n_decl_column;
6772 break;
6773 case DW_AT_decl_line:
6774 ++n_decl_line;
6775 break;
6776 case DW_AT_decl_file:
6777 ++n_decl_file;
6778 break;
6779 default:
6780 break;
6781 }
6782 }
6783 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6784 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6785 {
6786 fprintf (stderr, "Duplicate attributes in DIE:\n");
6787 debug_dwarf_die (die);
6788 gcc_unreachable ();
6789 }
6790 if (inline_found)
6791 {
6792 /* A debugging information entry that is a member of an abstract
6793 instance tree [that has DW_AT_inline] should not contain any
6794 attributes which describe aspects of the subroutine which vary
6795 between distinct inlined expansions or distinct out-of-line
6796 expansions. */
6797 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6798 gcc_assert (a->dw_attr != DW_AT_low_pc
6799 && a->dw_attr != DW_AT_high_pc
6800 && a->dw_attr != DW_AT_location
6801 && a->dw_attr != DW_AT_frame_base
6802 && a->dw_attr != DW_AT_call_all_calls
6803 && a->dw_attr != DW_AT_GNU_all_call_sites);
6804 }
6805 }
6806 \f
6807 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6808 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6809 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6810
6811 /* Calculate the checksum of a location expression. */
6812
6813 static inline void
6814 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6815 {
6816 int tem;
6817 inchash::hash hstate;
6818 hashval_t hash;
6819
6820 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6821 CHECKSUM (tem);
6822 hash_loc_operands (loc, hstate);
6823 hash = hstate.end();
6824 CHECKSUM (hash);
6825 }
6826
6827 /* Calculate the checksum of an attribute. */
6828
6829 static void
6830 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6831 {
6832 dw_loc_descr_ref loc;
6833 rtx r;
6834
6835 CHECKSUM (at->dw_attr);
6836
6837 /* We don't care that this was compiled with a different compiler
6838 snapshot; if the output is the same, that's what matters. */
6839 if (at->dw_attr == DW_AT_producer)
6840 return;
6841
6842 switch (AT_class (at))
6843 {
6844 case dw_val_class_const:
6845 case dw_val_class_const_implicit:
6846 CHECKSUM (at->dw_attr_val.v.val_int);
6847 break;
6848 case dw_val_class_unsigned_const:
6849 case dw_val_class_unsigned_const_implicit:
6850 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6851 break;
6852 case dw_val_class_const_double:
6853 CHECKSUM (at->dw_attr_val.v.val_double);
6854 break;
6855 case dw_val_class_wide_int:
6856 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6857 get_full_len (*at->dw_attr_val.v.val_wide)
6858 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6859 break;
6860 case dw_val_class_vec:
6861 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6862 (at->dw_attr_val.v.val_vec.length
6863 * at->dw_attr_val.v.val_vec.elt_size));
6864 break;
6865 case dw_val_class_flag:
6866 CHECKSUM (at->dw_attr_val.v.val_flag);
6867 break;
6868 case dw_val_class_str:
6869 CHECKSUM_STRING (AT_string (at));
6870 break;
6871
6872 case dw_val_class_addr:
6873 r = AT_addr (at);
6874 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6875 CHECKSUM_STRING (XSTR (r, 0));
6876 break;
6877
6878 case dw_val_class_offset:
6879 CHECKSUM (at->dw_attr_val.v.val_offset);
6880 break;
6881
6882 case dw_val_class_loc:
6883 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6884 loc_checksum (loc, ctx);
6885 break;
6886
6887 case dw_val_class_die_ref:
6888 die_checksum (AT_ref (at), ctx, mark);
6889 break;
6890
6891 case dw_val_class_fde_ref:
6892 case dw_val_class_vms_delta:
6893 case dw_val_class_symview:
6894 case dw_val_class_lbl_id:
6895 case dw_val_class_lineptr:
6896 case dw_val_class_macptr:
6897 case dw_val_class_loclistsptr:
6898 case dw_val_class_high_pc:
6899 break;
6900
6901 case dw_val_class_file:
6902 case dw_val_class_file_implicit:
6903 CHECKSUM_STRING (AT_file (at)->filename);
6904 break;
6905
6906 case dw_val_class_data8:
6907 CHECKSUM (at->dw_attr_val.v.val_data8);
6908 break;
6909
6910 default:
6911 break;
6912 }
6913 }
6914
6915 /* Calculate the checksum of a DIE. */
6916
6917 static void
6918 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6919 {
6920 dw_die_ref c;
6921 dw_attr_node *a;
6922 unsigned ix;
6923
6924 /* To avoid infinite recursion. */
6925 if (die->die_mark)
6926 {
6927 CHECKSUM (die->die_mark);
6928 return;
6929 }
6930 die->die_mark = ++(*mark);
6931
6932 CHECKSUM (die->die_tag);
6933
6934 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6935 attr_checksum (a, ctx, mark);
6936
6937 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6938 }
6939
6940 #undef CHECKSUM
6941 #undef CHECKSUM_BLOCK
6942 #undef CHECKSUM_STRING
6943
6944 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6945 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6946 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6947 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6948 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6949 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6950 #define CHECKSUM_ATTR(FOO) \
6951 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6952
6953 /* Calculate the checksum of a number in signed LEB128 format. */
6954
6955 static void
6956 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6957 {
6958 unsigned char byte;
6959 bool more;
6960
6961 while (1)
6962 {
6963 byte = (value & 0x7f);
6964 value >>= 7;
6965 more = !((value == 0 && (byte & 0x40) == 0)
6966 || (value == -1 && (byte & 0x40) != 0));
6967 if (more)
6968 byte |= 0x80;
6969 CHECKSUM (byte);
6970 if (!more)
6971 break;
6972 }
6973 }
6974
6975 /* Calculate the checksum of a number in unsigned LEB128 format. */
6976
6977 static void
6978 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6979 {
6980 while (1)
6981 {
6982 unsigned char byte = (value & 0x7f);
6983 value >>= 7;
6984 if (value != 0)
6985 /* More bytes to follow. */
6986 byte |= 0x80;
6987 CHECKSUM (byte);
6988 if (value == 0)
6989 break;
6990 }
6991 }
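
/* As a worked example (the values used in the DWARF spec): the unsigned
value 624485 is checksummed as the ULEB128 byte sequence 0xe5 0x8e 0x26,
and the signed value -123456 as the SLEB128 sequence 0xc0 0xbb 0x78;
those bytes are what the two helpers above feed into CTX. */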
6992
6993 /* Checksum the context of the DIE. This adds the names of any
6994 surrounding namespaces or structures to the checksum. */
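/* For example, for a type declared inside struct S in namespace N, this
adds 'C' DW_TAG_namespace "N" followed by 'C' DW_TAG_structure_type "S"
(tags as ULEB128 values, names with their terminating NUL). */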
6995
6996 static void
6997 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6998 {
6999 const char *name;
7000 dw_die_ref spec;
7001 int tag = die->die_tag;
7002
7003 if (tag != DW_TAG_namespace
7004 && tag != DW_TAG_structure_type
7005 && tag != DW_TAG_class_type)
7006 return;
7007
7008 name = get_AT_string (die, DW_AT_name);
7009
7010 spec = get_AT_ref (die, DW_AT_specification);
7011 if (spec != NULL)
7012 die = spec;
7013
7014 if (die->die_parent != NULL)
7015 checksum_die_context (die->die_parent, ctx);
7016
7017 CHECKSUM_ULEB128 ('C');
7018 CHECKSUM_ULEB128 (tag);
7019 if (name != NULL)
7020 CHECKSUM_STRING (name);
7021 }
7022
7023 /* Calculate the checksum of a location expression. */
7024
7025 static inline void
7026 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7027 {
7028 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7029 were emitted as a DW_FORM_sdata instead of a location expression. */
7030 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7031 {
7032 CHECKSUM_ULEB128 (DW_FORM_sdata);
7033 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7034 return;
7035 }
7036
7037 /* Otherwise, just checksum the raw location expression. */
7038 while (loc != NULL)
7039 {
7040 inchash::hash hstate;
7041 hashval_t hash;
7042
7043 CHECKSUM_ULEB128 (loc->dtprel);
7044 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7045 hash_loc_operands (loc, hstate);
7046 hash = hstate.end ();
7047 CHECKSUM (hash);
7048 loc = loc->dw_loc_next;
7049 }
7050 }
7051
7052 /* Calculate the checksum of an attribute. */
7053
7054 static void
7055 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7056 struct md5_ctx *ctx, int *mark)
7057 {
7058 dw_loc_descr_ref loc;
7059 rtx r;
7060
7061 if (AT_class (at) == dw_val_class_die_ref)
7062 {
7063 dw_die_ref target_die = AT_ref (at);
7064
7065 /* For pointer and reference types, we checksum only the (qualified)
7066 name of the target type (if there is a name). For friend entries,
7067 we checksum only the (qualified) name of the target type or function.
7068 This allows the checksum to remain the same whether the target type
7069 is complete or not. */
7070 if ((at->dw_attr == DW_AT_type
7071 && (tag == DW_TAG_pointer_type
7072 || tag == DW_TAG_reference_type
7073 || tag == DW_TAG_rvalue_reference_type
7074 || tag == DW_TAG_ptr_to_member_type))
7075 || (at->dw_attr == DW_AT_friend
7076 && tag == DW_TAG_friend))
7077 {
7078 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7079
7080 if (name_attr != NULL)
7081 {
7082 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7083
7084 if (decl == NULL)
7085 decl = target_die;
7086 CHECKSUM_ULEB128 ('N');
7087 CHECKSUM_ULEB128 (at->dw_attr);
7088 if (decl->die_parent != NULL)
7089 checksum_die_context (decl->die_parent, ctx);
7090 CHECKSUM_ULEB128 ('E');
7091 CHECKSUM_STRING (AT_string (name_attr));
7092 return;
7093 }
7094 }
7095
7096 /* For all other references to another DIE, we check to see if the
7097 target DIE has already been visited. If it has, we emit a
7098 backward reference; if not, we descend recursively. */
7099 if (target_die->die_mark > 0)
7100 {
7101 CHECKSUM_ULEB128 ('R');
7102 CHECKSUM_ULEB128 (at->dw_attr);
7103 CHECKSUM_ULEB128 (target_die->die_mark);
7104 }
7105 else
7106 {
7107 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7108
7109 if (decl == NULL)
7110 decl = target_die;
7111 target_die->die_mark = ++(*mark);
7112 CHECKSUM_ULEB128 ('T');
7113 CHECKSUM_ULEB128 (at->dw_attr);
7114 if (decl->die_parent != NULL)
7115 checksum_die_context (decl->die_parent, ctx);
7116 die_checksum_ordered (target_die, ctx, mark);
7117 }
7118 return;
7119 }
7120
7121 CHECKSUM_ULEB128 ('A');
7122 CHECKSUM_ULEB128 (at->dw_attr);
7123
7124 switch (AT_class (at))
7125 {
7126 case dw_val_class_const:
7127 case dw_val_class_const_implicit:
7128 CHECKSUM_ULEB128 (DW_FORM_sdata);
7129 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7130 break;
7131
7132 case dw_val_class_unsigned_const:
7133 case dw_val_class_unsigned_const_implicit:
7134 CHECKSUM_ULEB128 (DW_FORM_sdata);
7135 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7136 break;
7137
7138 case dw_val_class_const_double:
7139 CHECKSUM_ULEB128 (DW_FORM_block);
7140 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7141 CHECKSUM (at->dw_attr_val.v.val_double);
7142 break;
7143
7144 case dw_val_class_wide_int:
7145 CHECKSUM_ULEB128 (DW_FORM_block);
7146 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7147 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7148 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7149 get_full_len (*at->dw_attr_val.v.val_wide)
7150 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7151 break;
7152
7153 case dw_val_class_vec:
7154 CHECKSUM_ULEB128 (DW_FORM_block);
7155 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7156 * at->dw_attr_val.v.val_vec.elt_size);
7157 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7158 (at->dw_attr_val.v.val_vec.length
7159 * at->dw_attr_val.v.val_vec.elt_size));
7160 break;
7161
7162 case dw_val_class_flag:
7163 CHECKSUM_ULEB128 (DW_FORM_flag);
7164 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7165 break;
7166
7167 case dw_val_class_str:
7168 CHECKSUM_ULEB128 (DW_FORM_string);
7169 CHECKSUM_STRING (AT_string (at));
7170 break;
7171
7172 case dw_val_class_addr:
7173 r = AT_addr (at);
7174 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7175 CHECKSUM_ULEB128 (DW_FORM_string);
7176 CHECKSUM_STRING (XSTR (r, 0));
7177 break;
7178
7179 case dw_val_class_offset:
7180 CHECKSUM_ULEB128 (DW_FORM_sdata);
7181 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7182 break;
7183
7184 case dw_val_class_loc:
7185 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7186 loc_checksum_ordered (loc, ctx);
7187 break;
7188
7189 case dw_val_class_fde_ref:
7190 case dw_val_class_symview:
7191 case dw_val_class_lbl_id:
7192 case dw_val_class_lineptr:
7193 case dw_val_class_macptr:
7194 case dw_val_class_loclistsptr:
7195 case dw_val_class_high_pc:
7196 break;
7197
7198 case dw_val_class_file:
7199 case dw_val_class_file_implicit:
7200 CHECKSUM_ULEB128 (DW_FORM_string);
7201 CHECKSUM_STRING (AT_file (at)->filename);
7202 break;
7203
7204 case dw_val_class_data8:
7205 CHECKSUM (at->dw_attr_val.v.val_data8);
7206 break;
7207
7208 default:
7209 break;
7210 }
7211 }
7212
7213 struct checksum_attributes
7214 {
7215 dw_attr_node *at_name;
7216 dw_attr_node *at_type;
7217 dw_attr_node *at_friend;
7218 dw_attr_node *at_accessibility;
7219 dw_attr_node *at_address_class;
7220 dw_attr_node *at_alignment;
7221 dw_attr_node *at_allocated;
7222 dw_attr_node *at_artificial;
7223 dw_attr_node *at_associated;
7224 dw_attr_node *at_binary_scale;
7225 dw_attr_node *at_bit_offset;
7226 dw_attr_node *at_bit_size;
7227 dw_attr_node *at_bit_stride;
7228 dw_attr_node *at_byte_size;
7229 dw_attr_node *at_byte_stride;
7230 dw_attr_node *at_const_value;
7231 dw_attr_node *at_containing_type;
7232 dw_attr_node *at_count;
7233 dw_attr_node *at_data_location;
7234 dw_attr_node *at_data_member_location;
7235 dw_attr_node *at_decimal_scale;
7236 dw_attr_node *at_decimal_sign;
7237 dw_attr_node *at_default_value;
7238 dw_attr_node *at_digit_count;
7239 dw_attr_node *at_discr;
7240 dw_attr_node *at_discr_list;
7241 dw_attr_node *at_discr_value;
7242 dw_attr_node *at_encoding;
7243 dw_attr_node *at_endianity;
7244 dw_attr_node *at_explicit;
7245 dw_attr_node *at_is_optional;
7246 dw_attr_node *at_location;
7247 dw_attr_node *at_lower_bound;
7248 dw_attr_node *at_mutable;
7249 dw_attr_node *at_ordering;
7250 dw_attr_node *at_picture_string;
7251 dw_attr_node *at_prototyped;
7252 dw_attr_node *at_small;
7253 dw_attr_node *at_segment;
7254 dw_attr_node *at_string_length;
7255 dw_attr_node *at_string_length_bit_size;
7256 dw_attr_node *at_string_length_byte_size;
7257 dw_attr_node *at_threads_scaled;
7258 dw_attr_node *at_upper_bound;
7259 dw_attr_node *at_use_location;
7260 dw_attr_node *at_use_UTF8;
7261 dw_attr_node *at_variable_parameter;
7262 dw_attr_node *at_virtuality;
7263 dw_attr_node *at_visibility;
7264 dw_attr_node *at_vtable_elem_location;
7265 };
7266
7267 /* Collect the attributes that we will want to use for the checksum. */
7268
7269 static void
7270 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7271 {
7272 dw_attr_node *a;
7273 unsigned ix;
7274
7275 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7276 {
7277 switch (a->dw_attr)
7278 {
7279 case DW_AT_name:
7280 attrs->at_name = a;
7281 break;
7282 case DW_AT_type:
7283 attrs->at_type = a;
7284 break;
7285 case DW_AT_friend:
7286 attrs->at_friend = a;
7287 break;
7288 case DW_AT_accessibility:
7289 attrs->at_accessibility = a;
7290 break;
7291 case DW_AT_address_class:
7292 attrs->at_address_class = a;
7293 break;
7294 case DW_AT_alignment:
7295 attrs->at_alignment = a;
7296 break;
7297 case DW_AT_allocated:
7298 attrs->at_allocated = a;
7299 break;
7300 case DW_AT_artificial:
7301 attrs->at_artificial = a;
7302 break;
7303 case DW_AT_associated:
7304 attrs->at_associated = a;
7305 break;
7306 case DW_AT_binary_scale:
7307 attrs->at_binary_scale = a;
7308 break;
7309 case DW_AT_bit_offset:
7310 attrs->at_bit_offset = a;
7311 break;
7312 case DW_AT_bit_size:
7313 attrs->at_bit_size = a;
7314 break;
7315 case DW_AT_bit_stride:
7316 attrs->at_bit_stride = a;
7317 break;
7318 case DW_AT_byte_size:
7319 attrs->at_byte_size = a;
7320 break;
7321 case DW_AT_byte_stride:
7322 attrs->at_byte_stride = a;
7323 break;
7324 case DW_AT_const_value:
7325 attrs->at_const_value = a;
7326 break;
7327 case DW_AT_containing_type:
7328 attrs->at_containing_type = a;
7329 break;
7330 case DW_AT_count:
7331 attrs->at_count = a;
7332 break;
7333 case DW_AT_data_location:
7334 attrs->at_data_location = a;
7335 break;
7336 case DW_AT_data_member_location:
7337 attrs->at_data_member_location = a;
7338 break;
7339 case DW_AT_decimal_scale:
7340 attrs->at_decimal_scale = a;
7341 break;
7342 case DW_AT_decimal_sign:
7343 attrs->at_decimal_sign = a;
7344 break;
7345 case DW_AT_default_value:
7346 attrs->at_default_value = a;
7347 break;
7348 case DW_AT_digit_count:
7349 attrs->at_digit_count = a;
7350 break;
7351 case DW_AT_discr:
7352 attrs->at_discr = a;
7353 break;
7354 case DW_AT_discr_list:
7355 attrs->at_discr_list = a;
7356 break;
7357 case DW_AT_discr_value:
7358 attrs->at_discr_value = a;
7359 break;
7360 case DW_AT_encoding:
7361 attrs->at_encoding = a;
7362 break;
7363 case DW_AT_endianity:
7364 attrs->at_endianity = a;
7365 break;
7366 case DW_AT_explicit:
7367 attrs->at_explicit = a;
7368 break;
7369 case DW_AT_is_optional:
7370 attrs->at_is_optional = a;
7371 break;
7372 case DW_AT_location:
7373 attrs->at_location = a;
7374 break;
7375 case DW_AT_lower_bound:
7376 attrs->at_lower_bound = a;
7377 break;
7378 case DW_AT_mutable:
7379 attrs->at_mutable = a;
7380 break;
7381 case DW_AT_ordering:
7382 attrs->at_ordering = a;
7383 break;
7384 case DW_AT_picture_string:
7385 attrs->at_picture_string = a;
7386 break;
7387 case DW_AT_prototyped:
7388 attrs->at_prototyped = a;
7389 break;
7390 case DW_AT_small:
7391 attrs->at_small = a;
7392 break;
7393 case DW_AT_segment:
7394 attrs->at_segment = a;
7395 break;
7396 case DW_AT_string_length:
7397 attrs->at_string_length = a;
7398 break;
7399 case DW_AT_string_length_bit_size:
7400 attrs->at_string_length_bit_size = a;
7401 break;
7402 case DW_AT_string_length_byte_size:
7403 attrs->at_string_length_byte_size = a;
7404 break;
7405 case DW_AT_threads_scaled:
7406 attrs->at_threads_scaled = a;
7407 break;
7408 case DW_AT_upper_bound:
7409 attrs->at_upper_bound = a;
7410 break;
7411 case DW_AT_use_location:
7412 attrs->at_use_location = a;
7413 break;
7414 case DW_AT_use_UTF8:
7415 attrs->at_use_UTF8 = a;
7416 break;
7417 case DW_AT_variable_parameter:
7418 attrs->at_variable_parameter = a;
7419 break;
7420 case DW_AT_virtuality:
7421 attrs->at_virtuality = a;
7422 break;
7423 case DW_AT_visibility:
7424 attrs->at_visibility = a;
7425 break;
7426 case DW_AT_vtable_elem_location:
7427 attrs->at_vtable_elem_location = a;
7428 break;
7429 default:
7430 break;
7431 }
7432 }
7433 }
7434
7435 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7436
7437 static void
7438 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7439 {
7440 dw_die_ref c;
7441 dw_die_ref decl;
7442 struct checksum_attributes attrs;
7443
7444 CHECKSUM_ULEB128 ('D');
7445 CHECKSUM_ULEB128 (die->die_tag);
7446
7447 memset (&attrs, 0, sizeof (attrs));
7448
7449 decl = get_AT_ref (die, DW_AT_specification);
7450 if (decl != NULL)
7451 collect_checksum_attributes (&attrs, decl);
7452 collect_checksum_attributes (&attrs, die);
7453
7454 CHECKSUM_ATTR (attrs.at_name);
7455 CHECKSUM_ATTR (attrs.at_accessibility);
7456 CHECKSUM_ATTR (attrs.at_address_class);
7457 CHECKSUM_ATTR (attrs.at_allocated);
7458 CHECKSUM_ATTR (attrs.at_artificial);
7459 CHECKSUM_ATTR (attrs.at_associated);
7460 CHECKSUM_ATTR (attrs.at_binary_scale);
7461 CHECKSUM_ATTR (attrs.at_bit_offset);
7462 CHECKSUM_ATTR (attrs.at_bit_size);
7463 CHECKSUM_ATTR (attrs.at_bit_stride);
7464 CHECKSUM_ATTR (attrs.at_byte_size);
7465 CHECKSUM_ATTR (attrs.at_byte_stride);
7466 CHECKSUM_ATTR (attrs.at_const_value);
7467 CHECKSUM_ATTR (attrs.at_containing_type);
7468 CHECKSUM_ATTR (attrs.at_count);
7469 CHECKSUM_ATTR (attrs.at_data_location);
7470 CHECKSUM_ATTR (attrs.at_data_member_location);
7471 CHECKSUM_ATTR (attrs.at_decimal_scale);
7472 CHECKSUM_ATTR (attrs.at_decimal_sign);
7473 CHECKSUM_ATTR (attrs.at_default_value);
7474 CHECKSUM_ATTR (attrs.at_digit_count);
7475 CHECKSUM_ATTR (attrs.at_discr);
7476 CHECKSUM_ATTR (attrs.at_discr_list);
7477 CHECKSUM_ATTR (attrs.at_discr_value);
7478 CHECKSUM_ATTR (attrs.at_encoding);
7479 CHECKSUM_ATTR (attrs.at_endianity);
7480 CHECKSUM_ATTR (attrs.at_explicit);
7481 CHECKSUM_ATTR (attrs.at_is_optional);
7482 CHECKSUM_ATTR (attrs.at_location);
7483 CHECKSUM_ATTR (attrs.at_lower_bound);
7484 CHECKSUM_ATTR (attrs.at_mutable);
7485 CHECKSUM_ATTR (attrs.at_ordering);
7486 CHECKSUM_ATTR (attrs.at_picture_string);
7487 CHECKSUM_ATTR (attrs.at_prototyped);
7488 CHECKSUM_ATTR (attrs.at_small);
7489 CHECKSUM_ATTR (attrs.at_segment);
7490 CHECKSUM_ATTR (attrs.at_string_length);
7491 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7492 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7493 CHECKSUM_ATTR (attrs.at_threads_scaled);
7494 CHECKSUM_ATTR (attrs.at_upper_bound);
7495 CHECKSUM_ATTR (attrs.at_use_location);
7496 CHECKSUM_ATTR (attrs.at_use_UTF8);
7497 CHECKSUM_ATTR (attrs.at_variable_parameter);
7498 CHECKSUM_ATTR (attrs.at_virtuality);
7499 CHECKSUM_ATTR (attrs.at_visibility);
7500 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7501 CHECKSUM_ATTR (attrs.at_type);
7502 CHECKSUM_ATTR (attrs.at_friend);
7503 CHECKSUM_ATTR (attrs.at_alignment);
7504
7505 /* Checksum the child DIEs. */
7506 c = die->die_child;
7507 if (c) do {
7508 dw_attr_node *name_attr;
7509
7510 c = c->die_sib;
7511 name_attr = get_AT (c, DW_AT_name);
7512 if (is_template_instantiation (c))
7513 {
7514 /* Ignore instantiations of member type and function templates. */
7515 }
7516 else if (name_attr != NULL
7517 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7518 {
7519 /* Use a shallow checksum for named nested types and member
7520 functions. */
7521 CHECKSUM_ULEB128 ('S');
7522 CHECKSUM_ULEB128 (c->die_tag);
7523 CHECKSUM_STRING (AT_string (name_attr));
7524 }
7525 else
7526 {
7527 /* Use a deep checksum for other children. */
7528 /* Mark this DIE so it gets processed when unmarking. */
7529 if (c->die_mark == 0)
7530 c->die_mark = -1;
7531 die_checksum_ordered (c, ctx, mark);
7532 }
7533 } while (c != die->die_child);
7534
7535 CHECKSUM_ULEB128 (0);
7536 }
7537
7538 /* Add a type name and tag to a hash. */
7539 static void
7540 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7541 {
7542 CHECKSUM_ULEB128 (tag);
7543 CHECKSUM_STRING (name);
7544 }
7545
7546 #undef CHECKSUM
7547 #undef CHECKSUM_STRING
7548 #undef CHECKSUM_ATTR
7549 #undef CHECKSUM_SLEB128
7550 #undef CHECKSUM_ULEB128
7551
7552 /* Generate the type signature for DIE. This is computed by generating an
7553 MD5 checksum over the DIE's tag, its relevant attributes, and its
7554 children. Attributes that are references to other DIEs are processed
7555 by recursion, using the MARK field to prevent infinite recursion.
7556 If the DIE is nested inside a namespace or another type, we also
7557 need to include that context in the signature. The lower 64 bits
7558 of the resulting MD5 checksum comprise the signature. */
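/* Other units can then refer to the type by this 8-byte signature
(DW_AT_signature with form DW_FORM_ref_sig8) instead of by a direct
DIE reference. */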
7559
7560 static void
7561 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7562 {
7563 int mark;
7564 const char *name;
7565 unsigned char checksum[16];
7566 struct md5_ctx ctx;
7567 dw_die_ref decl;
7568 dw_die_ref parent;
7569
7570 name = get_AT_string (die, DW_AT_name);
7571 decl = get_AT_ref (die, DW_AT_specification);
7572 parent = get_die_parent (die);
7573
7574 /* First, compute a signature for just the type name (and its surrounding
7575 context, if any). This is stored in the type unit DIE for link-time
7576 ODR (one-definition rule) checking. */
7577
7578 if (is_cxx () && name != NULL)
7579 {
7580 md5_init_ctx (&ctx);
7581
7582 /* Checksum the names of surrounding namespaces and structures. */
7583 if (parent != NULL)
7584 checksum_die_context (parent, &ctx);
7585
7586 /* Checksum the current DIE. */
7587 die_odr_checksum (die->die_tag, name, &ctx);
7588 md5_finish_ctx (&ctx, checksum);
7589
7590 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7591 }
7592
7593 /* Next, compute the complete type signature. */
7594
7595 md5_init_ctx (&ctx);
7596 mark = 1;
7597 die->die_mark = mark;
7598
7599 /* Checksum the names of surrounding namespaces and structures. */
7600 if (parent != NULL)
7601 checksum_die_context (parent, &ctx);
7602
7603 /* Checksum the DIE and its children. */
7604 die_checksum_ordered (die, &ctx, &mark);
7605 unmark_all_dies (die);
7606 md5_finish_ctx (&ctx, checksum);
7607
7608 /* Store the signature in the type node and link the type DIE and the
7609 type node together. */
7610 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7611 DWARF_TYPE_SIGNATURE_SIZE);
7612 die->comdat_type_p = true;
7613 die->die_id.die_type_node = type_node;
7614 type_node->type_die = die;
7615
7616 /* If the DIE is a specification, link its declaration to the type node
7617 as well. */
7618 if (decl != NULL)
7619 {
7620 decl->comdat_type_p = true;
7621 decl->die_id.die_type_node = type_node;
7622 }
7623 }
7624
7625 /* Do the location expressions look the same? */
7626 static inline int
7627 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7628 {
7629 return loc1->dw_loc_opc == loc2->dw_loc_opc
7630 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7631 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7632 }
7633
7634 /* Do the values look the same? */
7635 static int
7636 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7637 {
7638 dw_loc_descr_ref loc1, loc2;
7639 rtx r1, r2;
7640
7641 if (v1->val_class != v2->val_class)
7642 return 0;
7643
7644 switch (v1->val_class)
7645 {
7646 case dw_val_class_const:
7647 case dw_val_class_const_implicit:
7648 return v1->v.val_int == v2->v.val_int;
7649 case dw_val_class_unsigned_const:
7650 case dw_val_class_unsigned_const_implicit:
7651 return v1->v.val_unsigned == v2->v.val_unsigned;
7652 case dw_val_class_const_double:
7653 return v1->v.val_double.high == v2->v.val_double.high
7654 && v1->v.val_double.low == v2->v.val_double.low;
7655 case dw_val_class_wide_int:
7656 return *v1->v.val_wide == *v2->v.val_wide;
7657 case dw_val_class_vec:
7658 if (v1->v.val_vec.length != v2->v.val_vec.length
7659 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7660 return 0;
7661 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7662 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7663 return 0;
7664 return 1;
7665 case dw_val_class_flag:
7666 return v1->v.val_flag == v2->v.val_flag;
7667 case dw_val_class_str:
7668 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7669
7670 case dw_val_class_addr:
7671 r1 = v1->v.val_addr;
7672 r2 = v2->v.val_addr;
7673 if (GET_CODE (r1) != GET_CODE (r2))
7674 return 0;
7675 return rtx_equal_p (r1, r2);
7676
7677 case dw_val_class_offset:
7678 return v1->v.val_offset == v2->v.val_offset;
7679
7680 case dw_val_class_loc:
7681 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7682 loc1 && loc2;
7683 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7684 if (!same_loc_p (loc1, loc2, mark))
7685 return 0;
7686 return !loc1 && !loc2;
7687
7688 case dw_val_class_die_ref:
7689 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7690
7691 case dw_val_class_symview:
7692 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7693
7694 case dw_val_class_fde_ref:
7695 case dw_val_class_vms_delta:
7696 case dw_val_class_lbl_id:
7697 case dw_val_class_lineptr:
7698 case dw_val_class_macptr:
7699 case dw_val_class_loclistsptr:
7700 case dw_val_class_high_pc:
7701 return 1;
7702
7703 case dw_val_class_file:
7704 case dw_val_class_file_implicit:
7705 return v1->v.val_file == v2->v.val_file;
7706
7707 case dw_val_class_data8:
7708 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7709
7710 default:
7711 return 1;
7712 }
7713 }
7714
7715 /* Do the attributes look the same? */
7716
7717 static int
7718 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7719 {
7720 if (at1->dw_attr != at2->dw_attr)
7721 return 0;
7722
7723 /* We don't care that this was compiled with a different compiler
7724 snapshot; if the output is the same, that's what matters. */
7725 if (at1->dw_attr == DW_AT_producer)
7726 return 1;
7727
7728 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7729 }
7730
7731 /* Do the dies look the same? */
7732
7733 static int
7734 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7735 {
7736 dw_die_ref c1, c2;
7737 dw_attr_node *a1;
7738 unsigned ix;
7739
7740 /* To avoid infinite recursion. */
7741 if (die1->die_mark)
7742 return die1->die_mark == die2->die_mark;
7743 die1->die_mark = die2->die_mark = ++(*mark);
7744
7745 if (die1->die_tag != die2->die_tag)
7746 return 0;
7747
7748 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7749 return 0;
7750
7751 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7752 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7753 return 0;
7754
7755 c1 = die1->die_child;
7756 c2 = die2->die_child;
7757 if (! c1)
7758 {
7759 if (c2)
7760 return 0;
7761 }
7762 else
7763 for (;;)
7764 {
7765 if (!same_die_p (c1, c2, mark))
7766 return 0;
7767 c1 = c1->die_sib;
7768 c2 = c2->die_sib;
7769 if (c1 == die1->die_child)
7770 {
7771 if (c2 == die2->die_child)
7772 break;
7773 else
7774 return 0;
7775 }
7776 }
7777
7778 return 1;
7779 }
7780
7781 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7782 children, and set die_symbol. */
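/* For example, a unit whose DW_AT_name is foo.c typically gets a symbol
such as foo_c_1a2b3c4d: the cleaned basename followed by the first four
checksum bytes in hex (the digits here are only illustrative). */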
7783
7784 static void
7785 compute_comp_unit_symbol (dw_die_ref unit_die)
7786 {
7787 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7788 const char *base = die_name ? lbasename (die_name) : "anonymous";
7789 char *name = XALLOCAVEC (char, strlen (base) + 64);
7790 char *p;
7791 int i, mark;
7792 unsigned char checksum[16];
7793 struct md5_ctx ctx;
7794
7795 /* Compute the checksum of the DIE, then append part of it as hex digits to
7796 the name of the unit. */
7797
7798 md5_init_ctx (&ctx);
7799 mark = 0;
7800 die_checksum (unit_die, &ctx, &mark);
7801 unmark_all_dies (unit_die);
7802 md5_finish_ctx (&ctx, checksum);
7803
7804 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7805 not start with a letter but with anything valid for filenames, and
7806 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7807 character is not a letter. */
7808 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7809 clean_symbol_name (name);
7810
7811 p = name + strlen (name);
7812 for (i = 0; i < 4; i++)
7813 {
7814 sprintf (p, "%.2x", checksum[i]);
7815 p += 2;
7816 }
7817
7818 unit_die->die_id.die_symbol = xstrdup (name);
7819 }
7820
7821 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7822
7823 static int
7824 is_type_die (dw_die_ref die)
7825 {
7826 switch (die->die_tag)
7827 {
7828 case DW_TAG_array_type:
7829 case DW_TAG_class_type:
7830 case DW_TAG_interface_type:
7831 case DW_TAG_enumeration_type:
7832 case DW_TAG_pointer_type:
7833 case DW_TAG_reference_type:
7834 case DW_TAG_rvalue_reference_type:
7835 case DW_TAG_string_type:
7836 case DW_TAG_structure_type:
7837 case DW_TAG_subroutine_type:
7838 case DW_TAG_union_type:
7839 case DW_TAG_ptr_to_member_type:
7840 case DW_TAG_set_type:
7841 case DW_TAG_subrange_type:
7842 case DW_TAG_base_type:
7843 case DW_TAG_const_type:
7844 case DW_TAG_file_type:
7845 case DW_TAG_packed_type:
7846 case DW_TAG_volatile_type:
7847 case DW_TAG_typedef:
7848 return 1;
7849 default:
7850 return 0;
7851 }
7852 }
7853
7854 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7855 Basically, we want to choose the bits that are likely to be shared between
7856 compilations (types) and leave out the bits that are specific to individual
7857 compilations (functions). */
7858
7859 static int
7860 is_comdat_die (dw_die_ref c)
7861 {
7862 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7863 we do for stabs. The advantage is a greater likelihood of sharing between
7864 objects that don't include headers in the same order (and therefore would
7865 put the base types in a different comdat). jason 8/28/00 */
7866
7867 if (c->die_tag == DW_TAG_base_type)
7868 return 0;
7869
7870 if (c->die_tag == DW_TAG_pointer_type
7871 || c->die_tag == DW_TAG_reference_type
7872 || c->die_tag == DW_TAG_rvalue_reference_type
7873 || c->die_tag == DW_TAG_const_type
7874 || c->die_tag == DW_TAG_volatile_type)
7875 {
7876 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7877
7878 return t ? is_comdat_die (t) : 0;
7879 }
7880
7881 return is_type_die (c);
7882 }
7883
7884 /* Returns true iff C is a compile-unit DIE. */
7885
7886 static inline bool
7887 is_cu_die (dw_die_ref c)
7888 {
7889 return c && (c->die_tag == DW_TAG_compile_unit
7890 || c->die_tag == DW_TAG_skeleton_unit);
7891 }
7892
7893 /* Returns true iff C is a unit DIE of some sort. */
7894
7895 static inline bool
7896 is_unit_die (dw_die_ref c)
7897 {
7898 return c && (c->die_tag == DW_TAG_compile_unit
7899 || c->die_tag == DW_TAG_partial_unit
7900 || c->die_tag == DW_TAG_type_unit
7901 || c->die_tag == DW_TAG_skeleton_unit);
7902 }
7903
7904 /* Returns true iff C is a namespace DIE. */
7905
7906 static inline bool
7907 is_namespace_die (dw_die_ref c)
7908 {
7909 return c && c->die_tag == DW_TAG_namespace;
7910 }
7911
7912 /* Returns true iff C is a class or structure DIE. */
7913
7914 static inline bool
7915 is_class_die (dw_die_ref c)
7916 {
7917 return c && (c->die_tag == DW_TAG_class_type
7918 || c->die_tag == DW_TAG_structure_type);
7919 }
7920
7921 /* Return non-zero if this DIE is a template parameter. */
7922
7923 static inline bool
7924 is_template_parameter (dw_die_ref die)
7925 {
7926 switch (die->die_tag)
7927 {
7928 case DW_TAG_template_type_param:
7929 case DW_TAG_template_value_param:
7930 case DW_TAG_GNU_template_template_param:
7931 case DW_TAG_GNU_template_parameter_pack:
7932 return true;
7933 default:
7934 return false;
7935 }
7936 }
7937
7938 /* Return non-zero if this DIE represents a template instantiation. */
7939
7940 static inline bool
7941 is_template_instantiation (dw_die_ref die)
7942 {
7943 dw_die_ref c;
7944
7945 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7946 return false;
7947 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7948 return false;
7949 }
7950
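/* Generate a fresh internal label name built from PREFIX and a global
counter, returned in newly xstrdup'ed storage. */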
7951 static char *
7952 gen_internal_sym (const char *prefix)
7953 {
7954 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7955
7956 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7957 return xstrdup (buf);
7958 }
7959
7960 /* Return non-zero if this DIE is a declaration. */
7961
7962 static int
7963 is_declaration_die (dw_die_ref die)
7964 {
7965 dw_attr_node *a;
7966 unsigned ix;
7967
7968 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7969 if (a->dw_attr == DW_AT_declaration)
7970 return 1;
7971
7972 return 0;
7973 }
7974
7975 /* Return non-zero if this DIE is nested inside a subprogram. */
7976
7977 static int
7978 is_nested_in_subprogram (dw_die_ref die)
7979 {
7980 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7981
7982 if (decl == NULL)
7983 decl = die;
7984 return local_scope_p (decl);
7985 }
7986
7987 /* Return non-zero if this DIE contains a defining declaration of a
7988 subprogram. */
7989
7990 static int
7991 contains_subprogram_definition (dw_die_ref die)
7992 {
7993 dw_die_ref c;
7994
7995 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7996 return 1;
7997 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7998 return 0;
7999 }
8000
8001 /* Return non-zero if this is a type DIE that should be moved to a
8002 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8003 unit type. */
8004
8005 static int
8006 should_move_die_to_comdat (dw_die_ref die)
8007 {
8008 switch (die->die_tag)
8009 {
8010 case DW_TAG_class_type:
8011 case DW_TAG_structure_type:
8012 case DW_TAG_enumeration_type:
8013 case DW_TAG_union_type:
8014 /* Don't move declarations, inlined instances, types nested in a
8015 subprogram, or types that contain subprogram definitions. */
8016 if (is_declaration_die (die)
8017 || get_AT (die, DW_AT_abstract_origin)
8018 || is_nested_in_subprogram (die)
8019 || contains_subprogram_definition (die))
8020 return 0;
8021 return 1;
8022 case DW_TAG_array_type:
8023 case DW_TAG_interface_type:
8024 case DW_TAG_pointer_type:
8025 case DW_TAG_reference_type:
8026 case DW_TAG_rvalue_reference_type:
8027 case DW_TAG_string_type:
8028 case DW_TAG_subroutine_type:
8029 case DW_TAG_ptr_to_member_type:
8030 case DW_TAG_set_type:
8031 case DW_TAG_subrange_type:
8032 case DW_TAG_base_type:
8033 case DW_TAG_const_type:
8034 case DW_TAG_file_type:
8035 case DW_TAG_packed_type:
8036 case DW_TAG_volatile_type:
8037 case DW_TAG_typedef:
8038 default:
8039 return 0;
8040 }
8041 }
8042
8043 /* Make a clone of DIE. */
8044
8045 static dw_die_ref
8046 clone_die (dw_die_ref die)
8047 {
8048 dw_die_ref clone = new_die_raw (die->die_tag);
8049 dw_attr_node *a;
8050 unsigned ix;
8051
8052 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8053 add_dwarf_attr (clone, a);
8054
8055 return clone;
8056 }
8057
8058 /* Make a clone of the tree rooted at DIE. */
8059
8060 static dw_die_ref
8061 clone_tree (dw_die_ref die)
8062 {
8063 dw_die_ref c;
8064 dw_die_ref clone = clone_die (die);
8065
8066 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8067
8068 return clone;
8069 }
8070
8071 /* Make a clone of DIE as a declaration. */
8072
8073 static dw_die_ref
8074 clone_as_declaration (dw_die_ref die)
8075 {
8076 dw_die_ref clone;
8077 dw_die_ref decl;
8078 dw_attr_node *a;
8079 unsigned ix;
8080
8081 /* If the DIE is already a declaration, just clone it. */
8082 if (is_declaration_die (die))
8083 return clone_die (die);
8084
8085 /* If the DIE is a specification, just clone its declaration DIE. */
8086 decl = get_AT_ref (die, DW_AT_specification);
8087 if (decl != NULL)
8088 {
8089 clone = clone_die (decl);
8090 if (die->comdat_type_p)
8091 add_AT_die_ref (clone, DW_AT_signature, die);
8092 return clone;
8093 }
8094
8095 clone = new_die_raw (die->die_tag);
8096
8097 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8098 {
8099 /* We don't want to copy over all attributes.
8100 For example, we don't want DW_AT_byte_size, because otherwise we would no
8101 longer have a declaration and GDB would treat it as a definition. */
8102
8103 switch (a->dw_attr)
8104 {
8105 case DW_AT_abstract_origin:
8106 case DW_AT_artificial:
8107 case DW_AT_containing_type:
8108 case DW_AT_external:
8109 case DW_AT_name:
8110 case DW_AT_type:
8111 case DW_AT_virtuality:
8112 case DW_AT_linkage_name:
8113 case DW_AT_MIPS_linkage_name:
8114 add_dwarf_attr (clone, a);
8115 break;
8116 case DW_AT_byte_size:
8117 case DW_AT_alignment:
8118 default:
8119 break;
8120 }
8121 }
8122
8123 if (die->comdat_type_p)
8124 add_AT_die_ref (clone, DW_AT_signature, die);
8125
8126 add_AT_flag (clone, DW_AT_declaration, 1);
8127 return clone;
8128 }
8129
8130
8131 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8132
8133 struct decl_table_entry
8134 {
8135 dw_die_ref orig;
8136 dw_die_ref copy;
8137 };
8138
8139 /* Helpers to manipulate hash table of copied declarations. */
8140
8141 /* Hashtable helpers. */
8142
8143 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8144 {
8145 typedef die_struct *compare_type;
8146 static inline hashval_t hash (const decl_table_entry *);
8147 static inline bool equal (const decl_table_entry *, const die_struct *);
8148 };
8149
8150 inline hashval_t
8151 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8152 {
8153 return htab_hash_pointer (entry->orig);
8154 }
8155
8156 inline bool
8157 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8158 const die_struct *entry2)
8159 {
8160 return entry1->orig == entry2;
8161 }
8162
8163 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8164
8165 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8166 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8167 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8168 to check if the ancestor has already been copied into UNIT. */
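/* For example, given a DIE for N::S::m, declaration-only copies of
namespace N and struct S are created under UNIT first (and reused via
DECL_TABLE on later calls), and a declaration copy of m is attached
under the copy of S. */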
8169
8170 static dw_die_ref
8171 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8172 decl_hash_type *decl_table)
8173 {
8174 dw_die_ref parent = die->die_parent;
8175 dw_die_ref new_parent = unit;
8176 dw_die_ref copy;
8177 decl_table_entry **slot = NULL;
8178 struct decl_table_entry *entry = NULL;
8179
8180 if (decl_table)
8181 {
8182 /* Check if the entry has already been copied to UNIT. */
8183 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8184 INSERT);
8185 if (*slot != HTAB_EMPTY_ENTRY)
8186 {
8187 entry = *slot;
8188 return entry->copy;
8189 }
8190
8191 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8192 entry = XCNEW (struct decl_table_entry);
8193 entry->orig = die;
8194 entry->copy = NULL;
8195 *slot = entry;
8196 }
8197
8198 if (parent != NULL)
8199 {
8200 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8201 if (spec != NULL)
8202 parent = spec;
8203 if (!is_unit_die (parent))
8204 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8205 }
8206
8207 copy = clone_as_declaration (die);
8208 add_child_die (new_parent, copy);
8209
8210 if (decl_table)
8211 {
8212 /* Record the pointer to the copy. */
8213 entry->copy = copy;
8214 }
8215
8216 return copy;
8217 }

8218 /* Copy the declaration context to the new type unit DIE. This includes
8219 any surrounding namespace or type declarations. If the DIE has an
8220 AT_specification attribute, it also includes attributes and children
8221 attached to the specification, and returns a pointer to the original
8222 parent of the declaration DIE. Returns NULL otherwise. */
8223
8224 static dw_die_ref
8225 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8226 {
8227 dw_die_ref decl;
8228 dw_die_ref new_decl;
8229 dw_die_ref orig_parent = NULL;
8230
8231 decl = get_AT_ref (die, DW_AT_specification);
8232 if (decl == NULL)
8233 decl = die;
8234 else
8235 {
8236 unsigned ix;
8237 dw_die_ref c;
8238 dw_attr_node *a;
8239
8240 /* The original DIE will be changed to a declaration, and must
8241 be moved to be a child of the original declaration DIE. */
8242 orig_parent = decl->die_parent;
8243
8244 /* Copy the type node pointer from the new DIE to the original
8245 declaration DIE so we can forward references later. */
8246 decl->comdat_type_p = true;
8247 decl->die_id.die_type_node = die->die_id.die_type_node;
8248
8249 remove_AT (die, DW_AT_specification);
8250
8251 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8252 {
8253 if (a->dw_attr != DW_AT_name
8254 && a->dw_attr != DW_AT_declaration
8255 && a->dw_attr != DW_AT_external)
8256 add_dwarf_attr (die, a);
8257 }
8258
8259 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8260 }
8261
8262 if (decl->die_parent != NULL
8263 && !is_unit_die (decl->die_parent))
8264 {
8265 new_decl = copy_ancestor_tree (unit, decl, NULL);
8266 if (new_decl != NULL)
8267 {
8268 remove_AT (new_decl, DW_AT_signature);
8269 add_AT_specification (die, new_decl);
8270 }
8271 }
8272
8273 return orig_parent;
8274 }
8275
8276 /* Generate the skeleton ancestor tree for the given NODE, then clone
8277 the DIE and add the clone into the tree. */
8278
8279 static void
8280 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8281 {
8282 if (node->new_die != NULL)
8283 return;
8284
8285 node->new_die = clone_as_declaration (node->old_die);
8286
8287 if (node->parent != NULL)
8288 {
8289 generate_skeleton_ancestor_tree (node->parent);
8290 add_child_die (node->parent->new_die, node->new_die);
8291 }
8292 }
8293
8294 /* Generate a skeleton tree of DIEs containing any declarations that are
8295 found in the original tree. We traverse the tree looking for declaration
8296 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8297
8298 static void
8299 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8300 {
8301 skeleton_chain_node node;
8302 dw_die_ref c;
8303 dw_die_ref first;
8304 dw_die_ref prev = NULL;
8305 dw_die_ref next = NULL;
8306
8307 node.parent = parent;
8308
8309 first = c = parent->old_die->die_child;
8310 if (c)
8311 next = c->die_sib;
8312 if (c) do {
8313 if (prev == NULL || prev->die_sib == c)
8314 prev = c;
8315 c = next;
8316 next = (c == first ? NULL : c->die_sib);
8317 node.old_die = c;
8318 node.new_die = NULL;
8319 if (is_declaration_die (c))
8320 {
8321 if (is_template_instantiation (c))
8322 {
8323 /* Instantiated templates do not need to be cloned into the
8324 type unit. Just move the DIE and its children back to
8325 the skeleton tree (in the main CU). */
8326 remove_child_with_prev (c, prev);
8327 add_child_die (parent->new_die, c);
8328 c = prev;
8329 }
8330 else if (c->comdat_type_p)
8331 {
8332 /* This is the skeleton of a type broken out by an earlier
8333 break_out_comdat_types call. Clone the existing DIE, but keep
8334 the children under the original (which is in the main CU). */
8335 dw_die_ref clone = clone_die (c);
8336
8337 replace_child (c, clone, prev);
8338 generate_skeleton_ancestor_tree (parent);
8339 add_child_die (parent->new_die, c);
8340 c = clone;
8341 continue;
8342 }
8343 else
8344 {
8345 /* Clone the existing DIE, move the original to the skeleton
8346 tree (which is in the main CU), and put the clone, with
8347 all the original's children, where the original came from
8348 (which is about to be moved to the type unit). */
8349 dw_die_ref clone = clone_die (c);
8350 move_all_children (c, clone);
8351
8352 /* If the original has a DW_AT_object_pointer attribute,
8353 it would now point to a child DIE just moved to the
8354 cloned tree, so we need to remove that attribute from
8355 the original. */
8356 remove_AT (c, DW_AT_object_pointer);
8357
8358 replace_child (c, clone, prev);
8359 generate_skeleton_ancestor_tree (parent);
8360 add_child_die (parent->new_die, c);
8361 node.old_die = clone;
8362 node.new_die = c;
8363 c = clone;
8364 }
8365 }
8366 generate_skeleton_bottom_up (&node);
8367 } while (next != NULL);
8368 }
8369
8370 /* Wrapper function for generate_skeleton_bottom_up. */
8371
8372 static dw_die_ref
8373 generate_skeleton (dw_die_ref die)
8374 {
8375 skeleton_chain_node node;
8376
8377 node.old_die = die;
8378 node.new_die = NULL;
8379 node.parent = NULL;
8380
8381 /* If this type definition is nested inside another type,
8382 and is not an instantiation of a template, always leave
8383 at least a declaration in its place. */
8384 if (die->die_parent != NULL
8385 && is_type_die (die->die_parent)
8386 && !is_template_instantiation (die))
8387 node.new_die = clone_as_declaration (die);
8388
8389 generate_skeleton_bottom_up (&node);
8390 return node.new_die;
8391 }
8392
8393 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8394 declaration. The original DIE is moved to a new compile unit so that
8395 existing references to it follow it to the new location. If any of the
8396 original DIE's descendants is a declaration, we need to replace the
8397 original DIE with a skeleton tree and move the declarations back into the
8398 skeleton tree. */
8399
8400 static dw_die_ref
8401 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8402 dw_die_ref prev)
8403 {
8404 dw_die_ref skeleton, orig_parent;
8405
8406 /* Copy the declaration context to the type unit DIE. If the returned
8407 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8408 that DIE. */
8409 orig_parent = copy_declaration_context (unit, child);
8410
8411 skeleton = generate_skeleton (child);
8412 if (skeleton == NULL)
8413 remove_child_with_prev (child, prev);
8414 else
8415 {
8416 skeleton->comdat_type_p = true;
8417 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8418
8419 /* If the original DIE was a specification, we need to put
8420 the skeleton under the parent DIE of the declaration.
8421 This leaves the original declaration in the tree, but
8422 it will be pruned later since there are no longer any
8423 references to it. */
8424 if (orig_parent != NULL)
8425 {
8426 remove_child_with_prev (child, prev);
8427 add_child_die (orig_parent, skeleton);
8428 }
8429 else
8430 replace_child (child, skeleton, prev);
8431 }
8432
8433 return skeleton;
8434 }
8435
8436 static void
8437 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8438 comdat_type_node *type_node,
8439 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8440
8441 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8442 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8443 DWARF procedure references in the DW_AT_location attribute. */
8444
8445 static dw_die_ref
8446 copy_dwarf_procedure (dw_die_ref die,
8447 comdat_type_node *type_node,
8448 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8449 {
8450 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8451
8452 /* DWARF procedures are not supposed to have children... */
8453 gcc_assert (die->die_child == NULL);
8454
8455 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8456 gcc_assert (vec_safe_length (die->die_attr) == 1
8457 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8458
8459 /* Do not copy DWARF procedures more than once. */
8460 bool existed;
8461 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8462 if (existed)
8463 return die_copy;
8464
8465 die_copy = clone_die (die);
8466 add_child_die (type_node->root_die, die_copy);
8467 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8468 return die_copy;
8469 }
8470
8471 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8472 procedures in DIE's attributes. */
8473
8474 static void
8475 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8476 comdat_type_node *type_node,
8477 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8478 {
8479 dw_attr_node *a;
8480 unsigned i;
8481
8482 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8483 {
8484 dw_loc_descr_ref loc;
8485
8486 if (a->dw_attr_val.val_class != dw_val_class_loc)
8487 continue;
8488
8489 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8490 {
8491 switch (loc->dw_loc_opc)
8492 {
8493 case DW_OP_call2:
8494 case DW_OP_call4:
8495 case DW_OP_call_ref:
8496 gcc_assert (loc->dw_loc_oprnd1.val_class
8497 == dw_val_class_die_ref);
8498 loc->dw_loc_oprnd1.v.val_die_ref.die
8499 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8500 type_node,
8501 copied_dwarf_procs);
8502
8503 default:
8504 break;
8505 }
8506 }
8507 }
8508 }
8509
8510 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
8511 rewrite references to point to the copies.
8512
8513 References are looked for in DIE's location description attributes and,
8514 recursively, in those of all its children. COPIED_DWARF_PROCS maps old
8515 DWARF procedures to their copies; it is used to avoid copying the same
8516 DWARF procedure more than once under TYPE_NODE. */
8517
8518 static void
8519 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8520 comdat_type_node *type_node,
8521 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8522 {
8523 dw_die_ref c;
8524
8525 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8526 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8527 type_node,
8528 copied_dwarf_procs));
8529 }
8530
8531 /* Traverse the DIE and set up additional .debug_types or .debug_info
8532 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8533 section. */
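/* For instance, a complete struct defined at namespace scope is moved
under a fresh DW_TAG_type_unit identified by its 8-byte signature, and a
skeleton DIE carrying DW_AT_signature may be left in its place so that
existing references from the main CU still resolve. */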
8534
8535 static void
8536 break_out_comdat_types (dw_die_ref die)
8537 {
8538 dw_die_ref c;
8539 dw_die_ref first;
8540 dw_die_ref prev = NULL;
8541 dw_die_ref next = NULL;
8542 dw_die_ref unit = NULL;
8543
8544 first = c = die->die_child;
8545 if (c)
8546 next = c->die_sib;
8547 if (c) do {
8548 if (prev == NULL || prev->die_sib == c)
8549 prev = c;
8550 c = next;
8551 next = (c == first ? NULL : c->die_sib);
8552 if (should_move_die_to_comdat (c))
8553 {
8554 dw_die_ref replacement;
8555 comdat_type_node *type_node;
8556
8557 /* Break out nested types into their own type units. */
8558 break_out_comdat_types (c);
8559
8560 /* Create a new type unit DIE as the root for the new tree, and
8561 add it to the list of comdat types. */
8562 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8563 add_AT_unsigned (unit, DW_AT_language,
8564 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8565 type_node = ggc_cleared_alloc<comdat_type_node> ();
8566 type_node->root_die = unit;
8567 type_node->next = comdat_type_list;
8568 comdat_type_list = type_node;
8569
8570 /* Generate the type signature. */
8571 generate_type_signature (c, type_node);
8572
8573 /* Copy the declaration context, attributes, and children of the
8574 declaration into the new type unit DIE, then remove this DIE
8575 from the main CU (or replace it with a skeleton if necessary). */
8576 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8577 type_node->skeleton_die = replacement;
8578
8579 /* Add the DIE to the new compunit. */
8580 add_child_die (unit, c);
8581
8582 /* Types can reference DWARF procedures for type size or data location
8583 expressions. Calls in DWARF expressions cannot target procedures
8584 that are not in the same section. So we must copy DWARF procedures
8585 along with this type and then rewrite references to them. */
8586 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8587 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8588
8589 if (replacement != NULL)
8590 c = replacement;
8591 }
8592 else if (c->die_tag == DW_TAG_namespace
8593 || c->die_tag == DW_TAG_class_type
8594 || c->die_tag == DW_TAG_structure_type
8595 || c->die_tag == DW_TAG_union_type)
8596 {
8597 /* Look for nested types that can be broken out. */
8598 break_out_comdat_types (c);
8599 }
8600 } while (next != NULL);
8601 }
8602
8603 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8604 Enter all the cloned children into the hash table decl_table. */
8605
8606 static dw_die_ref
8607 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8608 {
8609 dw_die_ref c;
8610 dw_die_ref clone;
8611 struct decl_table_entry *entry;
8612 decl_table_entry **slot;
8613
8614 if (die->die_tag == DW_TAG_subprogram)
8615 clone = clone_as_declaration (die);
8616 else
8617 clone = clone_die (die);
8618
8619 slot = decl_table->find_slot_with_hash (die,
8620 htab_hash_pointer (die), INSERT);
8621
8622 /* Assert that DIE isn't in the hash table yet. If it were already
8623 there, its ancestors would necessarily be there as well, and
8624 clone_tree_partial wouldn't have been called. */
8625 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8626
8627 entry = XCNEW (struct decl_table_entry);
8628 entry->orig = die;
8629 entry->copy = clone;
8630 *slot = entry;
8631
8632 if (die->die_tag != DW_TAG_subprogram)
8633 FOR_EACH_CHILD (die, c,
8634 add_child_die (clone, clone_tree_partial (c, decl_table)));
8635
8636 return clone;
8637 }
8638
8639 /* Walk the DIE and its children, looking for references to incomplete
8640 or trivial types that are unmarked (i.e., that are not in the current
8641 type_unit). */
8642
8643 static void
8644 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8645 {
8646 dw_die_ref c;
8647 dw_attr_node *a;
8648 unsigned ix;
8649
8650 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8651 {
8652 if (AT_class (a) == dw_val_class_die_ref)
8653 {
8654 dw_die_ref targ = AT_ref (a);
8655 decl_table_entry **slot;
8656 struct decl_table_entry *entry;
8657
8658 if (targ->die_mark != 0 || targ->comdat_type_p)
8659 continue;
8660
8661 slot = decl_table->find_slot_with_hash (targ,
8662 htab_hash_pointer (targ),
8663 INSERT);
8664
8665 if (*slot != HTAB_EMPTY_ENTRY)
8666 {
8667 /* TARG has already been copied, so we just need to
8668 modify the reference to point to the copy. */
8669 entry = *slot;
8670 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8671 }
8672 else
8673 {
8674 dw_die_ref parent = unit;
8675 dw_die_ref copy = clone_die (targ);
8676
8677 /* Record in DECL_TABLE that TARG has been copied.
8678 Need to do this now, before the recursive call,
8679 because DECL_TABLE may be expanded and SLOT
8680 would no longer be a valid pointer. */
8681 entry = XCNEW (struct decl_table_entry);
8682 entry->orig = targ;
8683 entry->copy = copy;
8684 *slot = entry;
8685
8686 /* If TARG is not a declaration DIE, we need to copy its
8687 children. */
8688 if (!is_declaration_die (targ))
8689 {
8690 FOR_EACH_CHILD (
8691 targ, c,
8692 add_child_die (copy,
8693 clone_tree_partial (c, decl_table)));
8694 }
8695
8696 /* Make sure the cloned tree is marked as part of the
8697 type unit. */
8698 mark_dies (copy);
8699
8700 /* If TARG has surrounding context, copy its ancestor tree
8701 into the new type unit. */
8702 if (targ->die_parent != NULL
8703 && !is_unit_die (targ->die_parent))
8704 parent = copy_ancestor_tree (unit, targ->die_parent,
8705 decl_table);
8706
8707 add_child_die (parent, copy);
8708 a->dw_attr_val.v.val_die_ref.die = copy;
8709
8710 /* Make sure the newly-copied DIE is walked. If it was
8711 installed in a previously-added context, it won't
8712 get visited otherwise. */
8713 if (parent != unit)
8714 {
8715 /* Find the highest point of the newly-added tree,
8716 mark each node along the way, and walk from there. */
8717 parent->die_mark = 1;
8718 while (parent->die_parent
8719 && parent->die_parent->die_mark == 0)
8720 {
8721 parent = parent->die_parent;
8722 parent->die_mark = 1;
8723 }
8724 copy_decls_walk (unit, parent, decl_table);
8725 }
8726 }
8727 }
8728 }
8729
8730 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8731 }
8732
8733 /* Copy declarations for "unworthy" types into the new comdat section.
8734 Incomplete types, modified types, and certain other types aren't broken
8735 out into comdat sections of their own, so they don't have a signature,
8736 and we need to copy the declaration into the same section so that we
8737 don't have an external reference. */
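/* "Unworthy" here means the DIEs for which should_move_die_to_comdat
returned zero, e.g. base types, pointer/reference/cv-qualified variants,
typedefs and plain declarations. */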
8738
8739 static void
8740 copy_decls_for_unworthy_types (dw_die_ref unit)
8741 {
8742 mark_dies (unit);
8743 decl_hash_type decl_table (10);
8744 copy_decls_walk (unit, unit, &decl_table);
8745 unmark_dies (unit);
8746 }
8747
8748 /* Traverse the DIE and add a sibling attribute if it may have the
8749 effect of speeding up access to siblings. To save some space,
8750 avoid generating sibling attributes for DIEs without children. */
8751
8752 static void
8753 add_sibling_attributes (dw_die_ref die)
8754 {
8755 dw_die_ref c;
8756
8757 if (! die->die_child)
8758 return;
8759
8760 if (die->die_parent && die != die->die_parent->die_child)
8761 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8762
8763 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8764 }
8765
8766 /* Output all location lists for the DIE and its children. */
8767
8768 static void
8769 output_location_lists (dw_die_ref die)
8770 {
8771 dw_die_ref c;
8772 dw_attr_node *a;
8773 unsigned ix;
8774
8775 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8776 if (AT_class (a) == dw_val_class_loc_list)
8777 output_loc_list (AT_loc_list (a));
8778
8779 FOR_EACH_CHILD (die, c, output_location_lists (c));
8780 }
8781
8782 /* During assign_location_list_indexes and output_loclists_offsets this
8783 holds the current index; afterwards, the number of assigned indexes
8784 (i.e. how large the .debug_loclists* offset table should be). */
8785 static unsigned int loc_list_idx;
8786
8787 /* Output all location list offsets for the DIE and its children. */
8788
8789 static void
8790 output_loclists_offsets (dw_die_ref die)
8791 {
8792 dw_die_ref c;
8793 dw_attr_node *a;
8794 unsigned ix;
8795
8796 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8797 if (AT_class (a) == dw_val_class_loc_list)
8798 {
8799 dw_loc_list_ref l = AT_loc_list (a);
8800 if (l->offset_emitted)
8801 continue;
8802 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8803 loc_section_label, NULL);
8804 gcc_assert (l->hash == loc_list_idx);
8805 loc_list_idx++;
8806 l->offset_emitted = true;
8807 }
8808
8809 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8810 }
8811
8812 /* Recursively set indexes of location lists. */
8813
8814 static void
8815 assign_location_list_indexes (dw_die_ref die)
8816 {
8817 dw_die_ref c;
8818 dw_attr_node *a;
8819 unsigned ix;
8820
8821 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8822 if (AT_class (a) == dw_val_class_loc_list)
8823 {
8824 dw_loc_list_ref list = AT_loc_list (a);
8825 if (!list->num_assigned)
8826 {
8827 list->num_assigned = true;
8828 list->hash = loc_list_idx++;
8829 }
8830 }
8831
8832 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8833 }
8834
8835 /* We want to limit the number of external references, because they are
8836 larger than local references: a relocation takes multiple words, and
8837 even a sig8 reference is always eight bytes, whereas a local reference
8838 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8839 So if we encounter multiple external references to the same type DIE, we
8840 make a local typedef stub for it and redirect all references there.
8841
8842 This is the element of the hash table for keeping track of these
8843 references. */
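/* For example, three references from this CU to a type that lives in a
separate type unit would otherwise each need an 8-byte DW_FORM_ref_sig8;
with a single local stub carrying DW_AT_signature, each reference becomes
a small CU-local form (e.g. DW_FORM_ref4) and only the stub pays the
8-byte cost. */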
8844
8845 struct external_ref
8846 {
8847 dw_die_ref type;
8848 dw_die_ref stub;
8849 unsigned n_refs;
8850 };
8851
8852 /* Hashtable helpers. */
8853
8854 struct external_ref_hasher : free_ptr_hash <external_ref>
8855 {
8856 static inline hashval_t hash (const external_ref *);
8857 static inline bool equal (const external_ref *, const external_ref *);
8858 };
8859
8860 inline hashval_t
8861 external_ref_hasher::hash (const external_ref *r)
8862 {
8863 dw_die_ref die = r->type;
8864 hashval_t h = 0;
8865
8866 /* We can't use the address of the DIE for hashing, because
8867 that will make the order of the stub DIEs non-deterministic. */
8868 if (! die->comdat_type_p)
8869 /* We have a symbol; use it to compute a hash. */
8870 h = htab_hash_string (die->die_id.die_symbol);
8871 else
8872 {
8873 /* We have a type signature; use a subset of the bits as the hash.
8874 The 8-byte signature is at least as large as hashval_t. */
8875 comdat_type_node *type_node = die->die_id.die_type_node;
8876 memcpy (&h, type_node->signature, sizeof (h));
8877 }
8878 return h;
8879 }
8880
8881 inline bool
8882 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8883 {
8884 return r1->type == r2->type;
8885 }
8886
8887 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8888
8889 /* Return a pointer to the external_ref for references to DIE. */
8890
8891 static struct external_ref *
8892 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8893 {
8894 struct external_ref ref, *ref_p;
8895 external_ref **slot;
8896
8897 ref.type = die;
8898 slot = map->find_slot (&ref, INSERT);
8899 if (*slot != HTAB_EMPTY_ENTRY)
8900 return *slot;
8901
8902 ref_p = XCNEW (struct external_ref);
8903 ref_p->type = die;
8904 *slot = ref_p;
8905 return ref_p;
8906 }
8907
8908 /* Subroutine of optimize_external_refs, below.
8909
8910 If we see a type skeleton, record it as our stub. If we see external
8911 references, remember how many we've seen. */
8912
8913 static void
8914 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8915 {
8916 dw_die_ref c;
8917 dw_attr_node *a;
8918 unsigned ix;
8919 struct external_ref *ref_p;
8920
8921 if (is_type_die (die)
8922 && (c = get_AT_ref (die, DW_AT_signature)))
8923 {
8924 /* This is a local skeleton; use it for local references. */
8925 ref_p = lookup_external_ref (map, c);
8926 ref_p->stub = die;
8927 }
8928
8929 /* Scan the DIE references, and remember any that refer to DIEs from
8930 other CUs (i.e. those which are not marked). */
8931 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8932 if (AT_class (a) == dw_val_class_die_ref
8933 && (c = AT_ref (a))->die_mark == 0
8934 && is_type_die (c))
8935 {
8936 ref_p = lookup_external_ref (map, c);
8937 ref_p->n_refs++;
8938 }
8939
8940 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8941 }
8942
8943 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8944 points to an external_ref, DATA is the CU we're processing. If we don't
8945 already have a local stub, and we have multiple refs, build a stub. */
8946
8947 int
8948 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8949 {
8950 struct external_ref *ref_p = *slot;
8951
8952 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8953 {
8954 /* We have multiple references to this type, so build a small stub.
8955 Both of these forms are a bit dodgy from the perspective of the
8956 DWARF standard, since technically they should have names. */
8957 dw_die_ref cu = data;
8958 dw_die_ref type = ref_p->type;
8959 dw_die_ref stub = NULL;
8960
8961 if (type->comdat_type_p)
8962 {
8963 /* If we refer to this type via sig8, use AT_signature. */
8964 stub = new_die (type->die_tag, cu, NULL_TREE);
8965 add_AT_die_ref (stub, DW_AT_signature, type);
8966 }
8967 else
8968 {
8969 /* Otherwise, use a typedef with no name. */
8970 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8971 add_AT_die_ref (stub, DW_AT_type, type);
8972 }
8973
8974 stub->die_mark++;
8975 ref_p->stub = stub;
8976 }
8977 return 1;
8978 }
8979
8980 /* DIE is a unit; look through all the DIE references to see if there are
8981 any external references to types, and if so, create local stubs for
8982 them which will be applied in build_abbrev_table. This is useful because
8983 references to local DIEs are smaller. */
8984
8985 static external_ref_hash_type *
8986 optimize_external_refs (dw_die_ref die)
8987 {
8988 external_ref_hash_type *map = new external_ref_hash_type (10);
8989 optimize_external_refs_1 (die, map);
8990 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8991 return map;
8992 }
8993
8994 /* The following 3 variables are temporaries that are computed only during the
8995 build_abbrev_table call and used and released during the following
8996 optimize_abbrev_table call. */
8997
8998 /* First abbrev_id that can be optimized based on usage. */
8999 static unsigned int abbrev_opt_start;
9000
9001 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9002 abbrev_id smaller than this, because they have already been sized
9003 during build_abbrev_table). */
9004 static unsigned int abbrev_opt_base_type_end;
9005
9006 /* Vector of usage counts during build_abbrev_table. Indexed by
9007 abbrev_id - abbrev_opt_start. */
9008 static vec<unsigned int> abbrev_usage_count;
9009
9010 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9011 static vec<dw_die_ref> sorted_abbrev_dies;
9012
9013 /* The format of each DIE (and its attribute value pairs) is encoded in an
9014 abbreviation table. This routine builds the abbreviation table and assigns
9015 a unique abbreviation id for each abbreviation entry. The children of each
9016 die are visited recursively. */
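
/* As an illustrative example (not the output for any particular input),
a DW_TAG_variable DIE with DW_AT_name in DW_FORM_strp and DW_AT_type in
DW_FORM_ref4 and no children is described by a .debug_abbrev entry along
the lines of:

.uleb128 <abbrev code>
.uleb128 DW_TAG_variable
.byte    DW_children_no
.uleb128 DW_AT_name    .uleb128 DW_FORM_strp
.uleb128 DW_AT_type    .uleb128 DW_FORM_ref4
.uleb128 0             .uleb128 0

Every DIE in .debug_info that matches this tag/attribute/form pattern
then reuses the same abbrev code.  */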
9017
9018 static void
9019 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9020 {
9021 unsigned int abbrev_id = 0;
9022 dw_die_ref c;
9023 dw_attr_node *a;
9024 unsigned ix;
9025 dw_die_ref abbrev;
9026
9027 /* Scan the DIE references, and replace any that refer to
9028 DIEs from other CUs (i.e. those which are not marked) with
9029 the local stubs we built in optimize_external_refs. */
9030 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9031 if (AT_class (a) == dw_val_class_die_ref
9032 && (c = AT_ref (a))->die_mark == 0)
9033 {
9034 struct external_ref *ref_p;
9035 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9036
9037 ref_p = lookup_external_ref (extern_map, c);
9038 if (ref_p->stub && ref_p->stub != die)
9039 change_AT_die_ref (a, ref_p->stub);
9040 else
9041 /* We aren't changing this reference, so mark it external. */
9042 set_AT_ref_external (a, 1);
9043 }
9044
9045 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9046 {
9047 dw_attr_node *die_a, *abbrev_a;
9048 unsigned ix;
9049 bool ok = true;
9050
9051 if (abbrev_id == 0)
9052 continue;
9053 if (abbrev->die_tag != die->die_tag)
9054 continue;
9055 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9056 continue;
9057
9058 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9059 continue;
9060
9061 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9062 {
9063 abbrev_a = &(*abbrev->die_attr)[ix];
9064 if ((abbrev_a->dw_attr != die_a->dw_attr)
9065 || (value_format (abbrev_a) != value_format (die_a)))
9066 {
9067 ok = false;
9068 break;
9069 }
9070 }
9071 if (ok)
9072 break;
9073 }
9074
9075 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9076 {
9077 vec_safe_push (abbrev_die_table, die);
9078 if (abbrev_opt_start)
9079 abbrev_usage_count.safe_push (0);
9080 }
9081 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9082 {
9083 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9084 sorted_abbrev_dies.safe_push (die);
9085 }
9086
9087 die->die_abbrev = abbrev_id;
9088 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9089 }
9090
9091 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9092 by die_abbrev's usage count, from the most commonly used
9093 abbreviation to the least. */
9094
9095 static int
9096 die_abbrev_cmp (const void *p1, const void *p2)
9097 {
9098 dw_die_ref die1 = *(const dw_die_ref *) p1;
9099 dw_die_ref die2 = *(const dw_die_ref *) p2;
9100
9101 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9102 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9103
9104 if (die1->die_abbrev >= abbrev_opt_base_type_end
9105 && die2->die_abbrev >= abbrev_opt_base_type_end)
9106 {
9107 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9108 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9109 return -1;
9110 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9111 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9112 return 1;
9113 }
9114
9115 /* Stabilize the sort. */
9116 if (die1->die_abbrev < die2->die_abbrev)
9117 return -1;
9118 if (die1->die_abbrev > die2->die_abbrev)
9119 return 1;
9120
9121 return 0;
9122 }
9123
9124 /* Convert dw_val_class_const, dw_val_class_unsigned_const and dw_val_class_file
9125 class attributes of the DIEs between sorted_abbrev_dies[first_id] and
9126 sorted_abbrev_dies[end - 1] into dw_val_class_const_implicit,
9127 dw_val_class_unsigned_const_implicit or dw_val_class_file_implicit. */
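
/* A worked example of the trade-off checked below (the numbers are
illustrative only): if 20 DIEs share an abbreviation whose
DW_AT_byte_size is always 4, keeping the constant in each DIE costs
20 * 1 byte of .debug_info, whereas DW_FORM_implicit_const stores it
once in .debug_abbrev as a 1-byte sleb128, a net saving of 19 bytes.  */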
9128
9129 static void
9130 optimize_implicit_const (unsigned int first_id, unsigned int end,
9131 vec<bool> &implicit_consts)
9132 {
9133 /* It never makes sense if there is just one DIE using the abbreviation. */
9134 if (end < first_id + 2)
9135 return;
9136
9137 dw_attr_node *a;
9138 unsigned ix, i;
9139 dw_die_ref die = sorted_abbrev_dies[first_id];
9140 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9141 if (implicit_consts[ix])
9142 {
9143 enum dw_val_class new_class = dw_val_class_none;
9144 switch (AT_class (a))
9145 {
9146 case dw_val_class_unsigned_const:
9147 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9148 continue;
9149
9150 /* The .debug_abbrev section will grow by
9151 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9152 in all the DIEs using that abbreviation. */
9153 if (constant_size (AT_unsigned (a)) * (end - first_id)
9154 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9155 continue;
9156
9157 new_class = dw_val_class_unsigned_const_implicit;
9158 break;
9159
9160 case dw_val_class_const:
9161 new_class = dw_val_class_const_implicit;
9162 break;
9163
9164 case dw_val_class_file:
9165 new_class = dw_val_class_file_implicit;
9166 break;
9167
9168 default:
9169 continue;
9170 }
9171 for (i = first_id; i < end; i++)
9172 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9173 = new_class;
9174 }
9175 }
9176
9177 /* Attempt to optimize the abbreviation table, reassigning abbreviation ids
9178 from abbrev_opt_start onwards. */
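
/* The abbrev code is emitted as a uleb128 at the start of every DIE, so
codes 1..127 take one byte and codes 128..16383 take two.  Handing the
smallest codes to the most frequently used abbreviations therefore
shrinks .debug_info, which is why this is only attempted when there are
more than 127 abbreviations or DWARF 5 implicit constants may help.  */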
9179
9180 static void
9181 optimize_abbrev_table (void)
9182 {
9183 if (abbrev_opt_start
9184 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9185 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9186 {
9187 auto_vec<bool, 32> implicit_consts;
9188 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9189
9190 unsigned int abbrev_id = abbrev_opt_start - 1;
9191 unsigned int first_id = ~0U;
9192 unsigned int last_abbrev_id = 0;
9193 unsigned int i;
9194 dw_die_ref die;
9195 if (abbrev_opt_base_type_end > abbrev_opt_start)
9196 abbrev_id = abbrev_opt_base_type_end - 1;
9197 /* Reassign abbreviation ids from abbrev_opt_start onwards, so that the
9198 most commonly used abbreviations come first. */
9199 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9200 {
9201 dw_attr_node *a;
9202 unsigned ix;
9203
9204 /* If calc_base_type_die_sizes has been called, the CU and
9205 base types after it can't be optimized, because we've already
9206 calculated their DIE offsets. We've sorted them first. */
9207 if (die->die_abbrev < abbrev_opt_base_type_end)
9208 continue;
9209 if (die->die_abbrev != last_abbrev_id)
9210 {
9211 last_abbrev_id = die->die_abbrev;
9212 if (dwarf_version >= 5 && first_id != ~0U)
9213 optimize_implicit_const (first_id, i, implicit_consts);
9214 abbrev_id++;
9215 (*abbrev_die_table)[abbrev_id] = die;
9216 if (dwarf_version >= 5)
9217 {
9218 first_id = i;
9219 implicit_consts.truncate (0);
9220
9221 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9222 switch (AT_class (a))
9223 {
9224 case dw_val_class_const:
9225 case dw_val_class_unsigned_const:
9226 case dw_val_class_file:
9227 implicit_consts.safe_push (true);
9228 break;
9229 default:
9230 implicit_consts.safe_push (false);
9231 break;
9232 }
9233 }
9234 }
9235 else if (dwarf_version >= 5)
9236 {
9237 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9238 if (!implicit_consts[ix])
9239 continue;
9240 else
9241 {
9242 dw_attr_node *other_a
9243 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9244 if (!dw_val_equal_p (&a->dw_attr_val,
9245 &other_a->dw_attr_val))
9246 implicit_consts[ix] = false;
9247 }
9248 }
9249 die->die_abbrev = abbrev_id;
9250 }
9251 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9252 if (dwarf_version >= 5 && first_id != ~0U)
9253 optimize_implicit_const (first_id, i, implicit_consts);
9254 }
9255
9256 abbrev_opt_start = 0;
9257 abbrev_opt_base_type_end = 0;
9258 abbrev_usage_count.release ();
9259 sorted_abbrev_dies.release ();
9260 }
9261 \f
9262 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9263
9264 static int
9265 constant_size (unsigned HOST_WIDE_INT value)
9266 {
9267 int log;
9268
9269 if (value == 0)
9270 log = 0;
9271 else
9272 log = floor_log2 (value);
9273
9274 log = log / 8;
9275 log = 1 << (floor_log2 (log) + 1);
9276
9277 return log;
9278 }
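
/* For example: constant_size (0x24) == 1, constant_size (0x101) == 2,
constant_size (0x10000) == 4 and constant_size (0x100000000) == 8,
matching the DW_FORM_data1/2/4/8 encodings chosen in value_format.  */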
9279
9280 /* Return the size of a DIE as it is represented in the
9281 .debug_info section. */
9282
9283 static unsigned long
9284 size_of_die (dw_die_ref die)
9285 {
9286 unsigned long size = 0;
9287 dw_attr_node *a;
9288 unsigned ix;
9289 enum dwarf_form form;
9290
9291 size += size_of_uleb128 (die->die_abbrev);
9292 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9293 {
9294 switch (AT_class (a))
9295 {
9296 case dw_val_class_addr:
9297 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9298 {
9299 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9300 size += size_of_uleb128 (AT_index (a));
9301 }
9302 else
9303 size += DWARF2_ADDR_SIZE;
9304 break;
9305 case dw_val_class_offset:
9306 size += DWARF_OFFSET_SIZE;
9307 break;
9308 case dw_val_class_loc:
9309 {
9310 unsigned long lsize = size_of_locs (AT_loc (a));
9311
9312 /* Block length. */
9313 if (dwarf_version >= 4)
9314 size += size_of_uleb128 (lsize);
9315 else
9316 size += constant_size (lsize);
9317 size += lsize;
9318 }
9319 break;
9320 case dw_val_class_loc_list:
9321 case dw_val_class_view_list:
9322 if (dwarf_split_debug_info && dwarf_version >= 5)
9323 {
9324 gcc_assert (AT_loc_list (a)->num_assigned);
9325 size += size_of_uleb128 (AT_loc_list (a)->hash);
9326 }
9327 else
9328 size += DWARF_OFFSET_SIZE;
9329 break;
9330 case dw_val_class_range_list:
9331 if (value_format (a) == DW_FORM_rnglistx)
9332 {
9333 gcc_assert (rnglist_idx);
9334 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9335 size += size_of_uleb128 (r->idx);
9336 }
9337 else
9338 size += DWARF_OFFSET_SIZE;
9339 break;
9340 case dw_val_class_const:
9341 size += size_of_sleb128 (AT_int (a));
9342 break;
9343 case dw_val_class_unsigned_const:
9344 {
9345 int csize = constant_size (AT_unsigned (a));
9346 if (dwarf_version == 3
9347 && a->dw_attr == DW_AT_data_member_location
9348 && csize >= 4)
9349 size += size_of_uleb128 (AT_unsigned (a));
9350 else
9351 size += csize;
9352 }
9353 break;
9354 case dw_val_class_symview:
9355 if (symview_upper_bound <= 0xff)
9356 size += 1;
9357 else if (symview_upper_bound <= 0xffff)
9358 size += 2;
9359 else if (symview_upper_bound <= 0xffffffff)
9360 size += 4;
9361 else
9362 size += 8;
9363 break;
9364 case dw_val_class_const_implicit:
9365 case dw_val_class_unsigned_const_implicit:
9366 case dw_val_class_file_implicit:
9367 /* These occupy no size in the DIE, just an extra sleb128 in
9368 .debug_abbrev. */
9369 break;
9370 case dw_val_class_const_double:
9371 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9372 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9373 size++; /* block */
9374 break;
9375 case dw_val_class_wide_int:
9376 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9377 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9378 if (get_full_len (*a->dw_attr_val.v.val_wide)
9379 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9380 size++; /* block */
9381 break;
9382 case dw_val_class_vec:
9383 size += constant_size (a->dw_attr_val.v.val_vec.length
9384 * a->dw_attr_val.v.val_vec.elt_size)
9385 + a->dw_attr_val.v.val_vec.length
9386 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9387 break;
9388 case dw_val_class_flag:
9389 if (dwarf_version >= 4)
9390 /* Currently all add_AT_flag calls pass in 1 as last argument,
9391 so DW_FORM_flag_present can be used. If that ever changes,
9392 we'll need to use DW_FORM_flag and have some optimization
9393 in build_abbrev_table that will change those to
9394 DW_FORM_flag_present if it is set to 1 in all DIEs using
9395 the same abbrev entry. */
9396 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9397 else
9398 size += 1;
9399 break;
9400 case dw_val_class_die_ref:
9401 if (AT_ref_external (a))
9402 {
9403 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9404 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9405 is sized by target address length, whereas in DWARF3
9406 it's always sized as an offset. */
9407 if (use_debug_types)
9408 size += DWARF_TYPE_SIGNATURE_SIZE;
9409 else if (dwarf_version == 2)
9410 size += DWARF2_ADDR_SIZE;
9411 else
9412 size += DWARF_OFFSET_SIZE;
9413 }
9414 else
9415 size += DWARF_OFFSET_SIZE;
9416 break;
9417 case dw_val_class_fde_ref:
9418 size += DWARF_OFFSET_SIZE;
9419 break;
9420 case dw_val_class_lbl_id:
9421 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9422 {
9423 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9424 size += size_of_uleb128 (AT_index (a));
9425 }
9426 else
9427 size += DWARF2_ADDR_SIZE;
9428 break;
9429 case dw_val_class_lineptr:
9430 case dw_val_class_macptr:
9431 case dw_val_class_loclistsptr:
9432 size += DWARF_OFFSET_SIZE;
9433 break;
9434 case dw_val_class_str:
9435 form = AT_string_form (a);
9436 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9437 size += DWARF_OFFSET_SIZE;
9438 else if (form == DW_FORM_GNU_str_index)
9439 size += size_of_uleb128 (AT_index (a));
9440 else
9441 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9442 break;
9443 case dw_val_class_file:
9444 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9445 break;
9446 case dw_val_class_data8:
9447 size += 8;
9448 break;
9449 case dw_val_class_vms_delta:
9450 size += DWARF_OFFSET_SIZE;
9451 break;
9452 case dw_val_class_high_pc:
9453 size += DWARF2_ADDR_SIZE;
9454 break;
9455 case dw_val_class_discr_value:
9456 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9457 break;
9458 case dw_val_class_discr_list:
9459 {
9460 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9461
9462 /* This is a block, so we have the block length and then its
9463 data. */
9464 size += constant_size (block_size) + block_size;
9465 }
9466 break;
9467 default:
9468 gcc_unreachable ();
9469 }
9470 }
9471
9472 return size;
9473 }
9474
9475 /* Size the debugging information associated with a given DIE. Visits the
9476 DIE's children recursively. Updates the global variable next_die_offset
9477 each time through. Uses the current value of next_die_offset to update the
9478 die_offset field in each DIE. */
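
/* For example (the sizes are illustrative): with the 11-byte 32-bit DWARF
v4 CU header, the compile unit DIE gets die_offset 11; if its own
encoding takes 25 bytes, its first child is assigned offset 36, and each
DIE that has children contributes one extra null byte to terminate its
sibling list.  */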
9479
9480 static void
9481 calc_die_sizes (dw_die_ref die)
9482 {
9483 dw_die_ref c;
9484
9485 gcc_assert (die->die_offset == 0
9486 || (unsigned long int) die->die_offset == next_die_offset);
9487 die->die_offset = next_die_offset;
9488 next_die_offset += size_of_die (die);
9489
9490 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9491
9492 if (die->die_child != NULL)
9493 /* Count the null byte used to terminate sibling lists. */
9494 next_die_offset += 1;
9495 }
9496
9497 /* Size just the base type children at the start of the CU.
9498 This is needed because build_abbrev_table needs to size location
9499 expressions and sizing of type-based stack ops needs to know die_offset
9500 values for the base types. */
9501
9502 static void
9503 calc_base_type_die_sizes (void)
9504 {
9505 unsigned long die_offset = (dwarf_split_debug_info
9506 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9507 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9508 unsigned int i;
9509 dw_die_ref base_type;
9510 #if ENABLE_ASSERT_CHECKING
9511 dw_die_ref prev = comp_unit_die ()->die_child;
9512 #endif
9513
9514 die_offset += size_of_die (comp_unit_die ());
9515 for (i = 0; base_types.iterate (i, &base_type); i++)
9516 {
9517 #if ENABLE_ASSERT_CHECKING
9518 gcc_assert (base_type->die_offset == 0
9519 && prev->die_sib == base_type
9520 && base_type->die_child == NULL
9521 && base_type->die_abbrev);
9522 prev = base_type;
9523 #endif
9524 if (abbrev_opt_start
9525 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9526 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9527 base_type->die_offset = die_offset;
9528 die_offset += size_of_die (base_type);
9529 }
9530 }
9531
9532 /* Set the marks for a die and its children. We do this so
9533 that we know whether or not a reference needs to use DW_FORM_ref_addr;
9534 only DIEs in the same CU will be marked. We used to clear out the offset
9535 and use that as the flag, but ran into ordering problems. */
9536
9537 static void
9538 mark_dies (dw_die_ref die)
9539 {
9540 dw_die_ref c;
9541
9542 gcc_assert (!die->die_mark);
9543
9544 die->die_mark = 1;
9545 FOR_EACH_CHILD (die, c, mark_dies (c));
9546 }
9547
9548 /* Clear the marks for a die and its children. */
9549
9550 static void
9551 unmark_dies (dw_die_ref die)
9552 {
9553 dw_die_ref c;
9554
9555 if (! use_debug_types)
9556 gcc_assert (die->die_mark);
9557
9558 die->die_mark = 0;
9559 FOR_EACH_CHILD (die, c, unmark_dies (c));
9560 }
9561
9562 /* Clear the marks for a die, its children and referred dies. */
9563
9564 static void
9565 unmark_all_dies (dw_die_ref die)
9566 {
9567 dw_die_ref c;
9568 dw_attr_node *a;
9569 unsigned ix;
9570
9571 if (!die->die_mark)
9572 return;
9573 die->die_mark = 0;
9574
9575 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9576
9577 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9578 if (AT_class (a) == dw_val_class_die_ref)
9579 unmark_all_dies (AT_ref (a));
9580 }
9581
9582 /* Calculate whether the entry should appear in the final output file. It
9583 may be from a pruned type. */
9584
9585 static bool
9586 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9587 {
9588 /* By limiting gnu pubnames to definitions only, gold can generate a
9589 gdb index without entries for declarations, which don't include
9590 enough information to be useful. */
9591 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9592 return false;
9593
9594 if (table == pubname_table)
9595 {
9596 /* Enumerator names are part of the pubname table, but the
9597 parent DW_TAG_enumeration_type die may have been pruned.
9598 Don't output them if that is the case. */
9599 if (p->die->die_tag == DW_TAG_enumerator &&
9600 (p->die->die_parent == NULL
9601 || !p->die->die_parent->die_perennial_p))
9602 return false;
9603
9604 /* Everything else in the pubname table is included. */
9605 return true;
9606 }
9607
9608 /* The pubtypes table shouldn't include types that have been
9609 pruned. */
9610 return (p->die->die_offset != 0
9611 || !flag_eliminate_unused_debug_types);
9612 }
9613
9614 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9615 generated for the compilation unit. */
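
/* Each entry that passes include_pubname_in_output contributes a
DWARF_OFFSET_SIZE DIE offset, the NUL-terminated name and, for GNU
pubnames, one flags byte; the final DWARF_OFFSET_SIZE below accounts
for the terminating zero entry.  */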
9616
9617 static unsigned long
9618 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9619 {
9620 unsigned long size;
9621 unsigned i;
9622 pubname_entry *p;
9623 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9624
9625 size = DWARF_PUBNAMES_HEADER_SIZE;
9626 FOR_EACH_VEC_ELT (*names, i, p)
9627 if (include_pubname_in_output (names, p))
9628 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9629
9630 size += DWARF_OFFSET_SIZE;
9631 return size;
9632 }
9633
9634 /* Return the size of the information in the .debug_aranges section. */
9635
9636 static unsigned long
9637 size_of_aranges (void)
9638 {
9639 unsigned long size;
9640
9641 size = DWARF_ARANGES_HEADER_SIZE;
9642
9643 /* Count the address/length pair for this compilation unit. */
9644 if (text_section_used)
9645 size += 2 * DWARF2_ADDR_SIZE;
9646 if (cold_text_section_used)
9647 size += 2 * DWARF2_ADDR_SIZE;
9648 if (have_multiple_function_sections)
9649 {
9650 unsigned fde_idx;
9651 dw_fde_ref fde;
9652
9653 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9654 {
9655 if (DECL_IGNORED_P (fde->decl))
9656 continue;
9657 if (!fde->in_std_section)
9658 size += 2 * DWARF2_ADDR_SIZE;
9659 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9660 size += 2 * DWARF2_ADDR_SIZE;
9661 }
9662 }
9663
9664 /* Count the two zero words used to terminate the address range table. */
9665 size += 2 * DWARF2_ADDR_SIZE;
9666 return size;
9667 }
9668 \f
9669 /* Select the encoding of an attribute value. */
9670
9671 static enum dwarf_form
9672 value_format (dw_attr_node *a)
9673 {
9674 switch (AT_class (a))
9675 {
9676 case dw_val_class_addr:
9677 /* Only very few attributes allow DW_FORM_addr. */
9678 switch (a->dw_attr)
9679 {
9680 case DW_AT_low_pc:
9681 case DW_AT_high_pc:
9682 case DW_AT_entry_pc:
9683 case DW_AT_trampoline:
9684 return (AT_index (a) == NOT_INDEXED
9685 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9686 default:
9687 break;
9688 }
9689 switch (DWARF2_ADDR_SIZE)
9690 {
9691 case 1:
9692 return DW_FORM_data1;
9693 case 2:
9694 return DW_FORM_data2;
9695 case 4:
9696 return DW_FORM_data4;
9697 case 8:
9698 return DW_FORM_data8;
9699 default:
9700 gcc_unreachable ();
9701 }
9702 case dw_val_class_loc_list:
9703 case dw_val_class_view_list:
9704 if (dwarf_split_debug_info
9705 && dwarf_version >= 5
9706 && AT_loc_list (a)->num_assigned)
9707 return DW_FORM_loclistx;
9708 /* FALLTHRU */
9709 case dw_val_class_range_list:
9710 /* For range lists in DWARF 5, use DW_FORM_rnglistx in .debug_info.dwo
9711 but DW_FORM_sec_offset in .debug_info, which is shorter if we
9712 care about the sizes of .debug* sections in shared libraries and
9713 executables and don't take into account relocations that affect just
9714 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9715 table in the .debug_rnglists section. */
9716 if (dwarf_split_debug_info
9717 && dwarf_version >= 5
9718 && AT_class (a) == dw_val_class_range_list
9719 && rnglist_idx
9720 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9721 return DW_FORM_rnglistx;
9722 if (dwarf_version >= 4)
9723 return DW_FORM_sec_offset;
9724 /* FALLTHRU */
9725 case dw_val_class_vms_delta:
9726 case dw_val_class_offset:
9727 switch (DWARF_OFFSET_SIZE)
9728 {
9729 case 4:
9730 return DW_FORM_data4;
9731 case 8:
9732 return DW_FORM_data8;
9733 default:
9734 gcc_unreachable ();
9735 }
9736 case dw_val_class_loc:
9737 if (dwarf_version >= 4)
9738 return DW_FORM_exprloc;
9739 switch (constant_size (size_of_locs (AT_loc (a))))
9740 {
9741 case 1:
9742 return DW_FORM_block1;
9743 case 2:
9744 return DW_FORM_block2;
9745 case 4:
9746 return DW_FORM_block4;
9747 default:
9748 gcc_unreachable ();
9749 }
9750 case dw_val_class_const:
9751 return DW_FORM_sdata;
9752 case dw_val_class_unsigned_const:
9753 switch (constant_size (AT_unsigned (a)))
9754 {
9755 case 1:
9756 return DW_FORM_data1;
9757 case 2:
9758 return DW_FORM_data2;
9759 case 4:
9760 /* In DWARF3 DW_AT_data_member_location with
9761 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9762 constant, so we need to use DW_FORM_udata if we need
9763 a large constant. */
9764 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9765 return DW_FORM_udata;
9766 return DW_FORM_data4;
9767 case 8:
9768 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9769 return DW_FORM_udata;
9770 return DW_FORM_data8;
9771 default:
9772 gcc_unreachable ();
9773 }
9774 case dw_val_class_const_implicit:
9775 case dw_val_class_unsigned_const_implicit:
9776 case dw_val_class_file_implicit:
9777 return DW_FORM_implicit_const;
9778 case dw_val_class_const_double:
9779 switch (HOST_BITS_PER_WIDE_INT)
9780 {
9781 case 8:
9782 return DW_FORM_data2;
9783 case 16:
9784 return DW_FORM_data4;
9785 case 32:
9786 return DW_FORM_data8;
9787 case 64:
9788 if (dwarf_version >= 5)
9789 return DW_FORM_data16;
9790 /* FALLTHRU */
9791 default:
9792 return DW_FORM_block1;
9793 }
9794 case dw_val_class_wide_int:
9795 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9796 {
9797 case 8:
9798 return DW_FORM_data1;
9799 case 16:
9800 return DW_FORM_data2;
9801 case 32:
9802 return DW_FORM_data4;
9803 case 64:
9804 return DW_FORM_data8;
9805 case 128:
9806 if (dwarf_version >= 5)
9807 return DW_FORM_data16;
9808 /* FALLTHRU */
9809 default:
9810 return DW_FORM_block1;
9811 }
9812 case dw_val_class_symview:
9813 /* ??? We might use uleb128, but then we'd have to compute
9814 .debug_info offsets in the assembler. */
9815 if (symview_upper_bound <= 0xff)
9816 return DW_FORM_data1;
9817 else if (symview_upper_bound <= 0xffff)
9818 return DW_FORM_data2;
9819 else if (symview_upper_bound <= 0xffffffff)
9820 return DW_FORM_data4;
9821 else
9822 return DW_FORM_data8;
9823 case dw_val_class_vec:
9824 switch (constant_size (a->dw_attr_val.v.val_vec.length
9825 * a->dw_attr_val.v.val_vec.elt_size))
9826 {
9827 case 1:
9828 return DW_FORM_block1;
9829 case 2:
9830 return DW_FORM_block2;
9831 case 4:
9832 return DW_FORM_block4;
9833 default:
9834 gcc_unreachable ();
9835 }
9836 case dw_val_class_flag:
9837 if (dwarf_version >= 4)
9838 {
9839 /* Currently all add_AT_flag calls pass in 1 as last argument,
9840 so DW_FORM_flag_present can be used. If that ever changes,
9841 we'll need to use DW_FORM_flag and have some optimization
9842 in build_abbrev_table that will change those to
9843 DW_FORM_flag_present if it is set to 1 in all DIEs using
9844 the same abbrev entry. */
9845 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9846 return DW_FORM_flag_present;
9847 }
9848 return DW_FORM_flag;
9849 case dw_val_class_die_ref:
9850 if (AT_ref_external (a))
9851 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9852 else
9853 return DW_FORM_ref;
9854 case dw_val_class_fde_ref:
9855 return DW_FORM_data;
9856 case dw_val_class_lbl_id:
9857 return (AT_index (a) == NOT_INDEXED
9858 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9859 case dw_val_class_lineptr:
9860 case dw_val_class_macptr:
9861 case dw_val_class_loclistsptr:
9862 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9863 case dw_val_class_str:
9864 return AT_string_form (a);
9865 case dw_val_class_file:
9866 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9867 {
9868 case 1:
9869 return DW_FORM_data1;
9870 case 2:
9871 return DW_FORM_data2;
9872 case 4:
9873 return DW_FORM_data4;
9874 default:
9875 gcc_unreachable ();
9876 }
9877
9878 case dw_val_class_data8:
9879 return DW_FORM_data8;
9880
9881 case dw_val_class_high_pc:
9882 switch (DWARF2_ADDR_SIZE)
9883 {
9884 case 1:
9885 return DW_FORM_data1;
9886 case 2:
9887 return DW_FORM_data2;
9888 case 4:
9889 return DW_FORM_data4;
9890 case 8:
9891 return DW_FORM_data8;
9892 default:
9893 gcc_unreachable ();
9894 }
9895
9896 case dw_val_class_discr_value:
9897 return (a->dw_attr_val.v.val_discr_value.pos
9898 ? DW_FORM_udata
9899 : DW_FORM_sdata);
9900 case dw_val_class_discr_list:
9901 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9902 {
9903 case 1:
9904 return DW_FORM_block1;
9905 case 2:
9906 return DW_FORM_block2;
9907 case 4:
9908 return DW_FORM_block4;
9909 default:
9910 gcc_unreachable ();
9911 }
9912
9913 default:
9914 gcc_unreachable ();
9915 }
9916 }
9917
9918 /* Output the encoding of an attribute value. */
9919
9920 static void
9921 output_value_format (dw_attr_node *a)
9922 {
9923 enum dwarf_form form = value_format (a);
9924
9925 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9926 }
9927
9928 /* Given a die and id, produce the appropriate abbreviations. */
9929
9930 static void
9931 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9932 {
9933 unsigned ix;
9934 dw_attr_node *a_attr;
9935
9936 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9937 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9938 dwarf_tag_name (abbrev->die_tag));
9939
9940 if (abbrev->die_child != NULL)
9941 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9942 else
9943 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9944
9945 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9946 {
9947 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9948 dwarf_attr_name (a_attr->dw_attr));
9949 output_value_format (a_attr);
9950 if (value_format (a_attr) == DW_FORM_implicit_const)
9951 {
9952 if (AT_class (a_attr) == dw_val_class_file_implicit)
9953 {
9954 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9955 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9956 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9957 }
9958 else
9959 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9960 }
9961 }
9962
9963 dw2_asm_output_data (1, 0, NULL);
9964 dw2_asm_output_data (1, 0, NULL);
9965 }
9966
9967
9968 /* Output the .debug_abbrev section which defines the DIE abbreviation
9969 table. */
9970
9971 static void
9972 output_abbrev_section (void)
9973 {
9974 unsigned int abbrev_id;
9975 dw_die_ref abbrev;
9976
9977 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9978 if (abbrev_id != 0)
9979 output_die_abbrevs (abbrev_id, abbrev);
9980
9981 /* Terminate the table. */
9982 dw2_asm_output_data (1, 0, NULL);
9983 }
9984
9985 /* Return a new location list, given the begin and end range, and the
9986 expression. */
9987
9988 static inline dw_loc_list_ref
9989 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
9990 const char *end, var_loc_view vend,
9991 const char *section)
9992 {
9993 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9994
9995 retlist->begin = begin;
9996 retlist->begin_entry = NULL;
9997 retlist->end = end;
9998 retlist->expr = expr;
9999 retlist->section = section;
10000 retlist->vbegin = vbegin;
10001 retlist->vend = vend;
10002
10003 return retlist;
10004 }
10005
10006 /* Return true iff there's any nonzero view number in the loc list. */
10007
10008 static bool
10009 loc_list_has_views (dw_loc_list_ref list)
10010 {
10011 if (!debug_variable_location_views)
10012 return false;
10013
10014 for (dw_loc_list_ref loc = list;
10015 loc != NULL; loc = loc->dw_loc_next)
10016 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10017 return true;
10018
10019 return false;
10020 }
10021
10022 /* Generate a new internal symbol for this location list node, if it
10023 hasn't got one yet. */
10024
10025 static inline void
10026 gen_llsym (dw_loc_list_ref list)
10027 {
10028 gcc_assert (!list->ll_symbol);
10029 list->ll_symbol = gen_internal_sym ("LLST");
10030
10031 if (!loc_list_has_views (list))
10032 return;
10033
10034 if (dwarf2out_locviews_in_attribute ())
10035 {
10036 /* Use the same label_num for the view list. */
10037 label_num--;
10038 list->vl_symbol = gen_internal_sym ("LVUS");
10039 }
10040 else
10041 list->vl_symbol = list->ll_symbol;
10042 }
10043
10044 /* Generate a symbol for the list, but only if we really want to emit
10045 it as a list. */
10046
10047 static inline void
10048 maybe_gen_llsym (dw_loc_list_ref list)
10049 {
10050 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10051 return;
10052
10053 gen_llsym (list);
10054 }
10055
10056 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10057 NULL, don't consider size of the location expression. If we're not
10058 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10059 representation in *SIZEP. */
10060
10061 static bool
10062 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10063 {
10064 /* Don't output an entry that starts and ends at the same address. */
10065 if (strcmp (curr->begin, curr->end) == 0
10066 && curr->vbegin == curr->vend && !curr->force)
10067 return true;
10068
10069 if (!sizep)
10070 return false;
10071
10072 unsigned long size = size_of_locs (curr->expr);
10073
10074 /* If the expression is too large, drop it on the floor. We could
10075 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10076 in the expression, but >= 64KB expressions for a single value
10077 in a single range are unlikely to be very useful. */
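/* (Pre-DWARF 5 location lists store the expression length in a 2-byte
field - see output_loc_list below - hence the 0xffff limit; DWARF 5
uses a uleb128 length instead.)  */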
10078 if (dwarf_version < 5 && size > 0xffff)
10079 return true;
10080
10081 *sizep = size;
10082
10083 return false;
10084 }
10085
10086 /* Output a view pair loclist entry for CURR, if it requires one. */
10087
10088 static void
10089 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10090 {
10091 if (!dwarf2out_locviews_in_loclist ())
10092 return;
10093
10094 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10095 return;
10096
10097 #ifdef DW_LLE_view_pair
10098 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10099
10100 if (dwarf2out_as_locview_support)
10101 {
10102 if (ZERO_VIEW_P (curr->vbegin))
10103 dw2_asm_output_data_uleb128 (0, "Location view begin");
10104 else
10105 {
10106 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10107 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10108 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10109 }
10110
10111 if (ZERO_VIEW_P (curr->vend))
10112 dw2_asm_output_data_uleb128 (0, "Location view end");
10113 else
10114 {
10115 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10116 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10117 dw2_asm_output_symname_uleb128 (label, "Location view end");
10118 }
10119 }
10120 else
10121 {
10122 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10123 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10124 }
10125 #endif /* DW_LLE_view_pair */
10126
10127 return;
10128 }
10129
10130 /* Output the location list given to us. */
10131
10132 static void
10133 output_loc_list (dw_loc_list_ref list_head)
10134 {
10135 int vcount = 0, lcount = 0;
10136
10137 if (list_head->emitted)
10138 return;
10139 list_head->emitted = true;
10140
10141 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10142 {
10143 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10144
10145 for (dw_loc_list_ref curr = list_head; curr != NULL;
10146 curr = curr->dw_loc_next)
10147 {
10148 unsigned long size;
10149
10150 if (skip_loc_list_entry (curr, &size))
10151 continue;
10152
10153 vcount++;
10154
10155 /* ?? dwarf_split_debug_info? */
10156 if (dwarf2out_as_locview_support)
10157 {
10158 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10159
10160 if (!ZERO_VIEW_P (curr->vbegin))
10161 {
10162 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10163 dw2_asm_output_symname_uleb128 (label,
10164 "View list begin (%s)",
10165 list_head->vl_symbol);
10166 }
10167 else
10168 dw2_asm_output_data_uleb128 (0,
10169 "View list begin (%s)",
10170 list_head->vl_symbol);
10171
10172 if (!ZERO_VIEW_P (curr->vend))
10173 {
10174 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10175 dw2_asm_output_symname_uleb128 (label,
10176 "View list end (%s)",
10177 list_head->vl_symbol);
10178 }
10179 else
10180 dw2_asm_output_data_uleb128 (0,
10181 "View list end (%s)",
10182 list_head->vl_symbol);
10183 }
10184 else
10185 {
10186 dw2_asm_output_data_uleb128 (curr->vbegin,
10187 "View list begin (%s)",
10188 list_head->vl_symbol);
10189 dw2_asm_output_data_uleb128 (curr->vend,
10190 "View list end (%s)",
10191 list_head->vl_symbol);
10192 }
10193 }
10194 }
10195
10196 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10197
10198 const char *last_section = NULL;
10199 const char *base_label = NULL;
10200
10201 /* Walk the location list, and output each range + expression. */
10202 for (dw_loc_list_ref curr = list_head; curr != NULL;
10203 curr = curr->dw_loc_next)
10204 {
10205 unsigned long size;
10206
10207 /* Skip this entry? If we skip it here, we must skip it in the
10208 view list above as well. */
10209 if (skip_loc_list_entry (curr, &size))
10210 continue;
10211
10212 lcount++;
10213
10214 if (dwarf_version >= 5)
10215 {
10216 if (dwarf_split_debug_info)
10217 {
10218 dwarf2out_maybe_output_loclist_view_pair (curr);
10219 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has a
10220 uleb128 index into .debug_addr and a uleb128 length. */
10221 dw2_asm_output_data (1, DW_LLE_startx_length,
10222 "DW_LLE_startx_length (%s)",
10223 list_head->ll_symbol);
10224 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10225 "Location list range start index "
10226 "(%s)", curr->begin);
10227 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10228 For that case we probably need to emit DW_LLE_startx_endx,
10229 but we'd need 2 .debug_addr entries rather than just one. */
10230 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10231 "Location list length (%s)",
10232 list_head->ll_symbol);
10233 }
10234 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10235 {
10236 dwarf2out_maybe_output_loclist_view_pair (curr);
10237 /* If all code is in the .text section, the base address is
10238 already provided by the CU attributes. Use
10239 DW_LLE_offset_pair, where both addresses are uleb128-encoded
10240 offsets against that base. */
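/* Illustrative assembler output for one such entry (the label names
are made up):
.byte    0x4                  DW_LLE_offset_pair
.uleb128 .LVL1-.Ltext0        begin offset
.uleb128 .LVL2-.Ltext0        end offset  */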
10241 dw2_asm_output_data (1, DW_LLE_offset_pair,
10242 "DW_LLE_offset_pair (%s)",
10243 list_head->ll_symbol);
10244 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10245 "Location list begin address (%s)",
10246 list_head->ll_symbol);
10247 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10248 "Location list end address (%s)",
10249 list_head->ll_symbol);
10250 }
10251 else if (HAVE_AS_LEB128)
10252 {
10253 /* Otherwise, find out how many consecutive entries could share
10254 the same base entry. If just one, emit DW_LLE_start_length,
10255 otherwise emit DW_LLE_base_address for the base address
10256 followed by a series of DW_LLE_offset_pair. */
10257 if (last_section == NULL || curr->section != last_section)
10258 {
10259 dw_loc_list_ref curr2;
10260 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10261 curr2 = curr2->dw_loc_next)
10262 {
10263 if (strcmp (curr2->begin, curr2->end) == 0
10264 && !curr2->force)
10265 continue;
10266 break;
10267 }
10268 if (curr2 == NULL || curr->section != curr2->section)
10269 last_section = NULL;
10270 else
10271 {
10272 last_section = curr->section;
10273 base_label = curr->begin;
10274 dw2_asm_output_data (1, DW_LLE_base_address,
10275 "DW_LLE_base_address (%s)",
10276 list_head->ll_symbol);
10277 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10278 "Base address (%s)",
10279 list_head->ll_symbol);
10280 }
10281 }
10282 /* Only one entry with the same base address. Use
10283 DW_LLE_start_length with absolute address and uleb128
10284 length. */
10285 if (last_section == NULL)
10286 {
10287 dwarf2out_maybe_output_loclist_view_pair (curr);
10288 dw2_asm_output_data (1, DW_LLE_start_length,
10289 "DW_LLE_start_length (%s)",
10290 list_head->ll_symbol);
10291 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10292 "Location list begin address (%s)",
10293 list_head->ll_symbol);
10294 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10295 "Location list length "
10296 "(%s)", list_head->ll_symbol);
10297 }
10298 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10299 DW_LLE_base_address. */
10300 else
10301 {
10302 dwarf2out_maybe_output_loclist_view_pair (curr);
10303 dw2_asm_output_data (1, DW_LLE_offset_pair,
10304 "DW_LLE_offset_pair (%s)",
10305 list_head->ll_symbol);
10306 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10307 "Location list begin address "
10308 "(%s)", list_head->ll_symbol);
10309 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10310 "Location list end address "
10311 "(%s)", list_head->ll_symbol);
10312 }
10313 }
10314 /* The assembler does not support the .uleb128 directive. Emit
10315 DW_LLE_start_end with a pair of absolute addresses. */
10316 else
10317 {
10318 dwarf2out_maybe_output_loclist_view_pair (curr);
10319 dw2_asm_output_data (1, DW_LLE_start_end,
10320 "DW_LLE_start_end (%s)",
10321 list_head->ll_symbol);
10322 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10323 "Location list begin address (%s)",
10324 list_head->ll_symbol);
10325 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10326 "Location list end address (%s)",
10327 list_head->ll_symbol);
10328 }
10329 }
10330 else if (dwarf_split_debug_info)
10331 {
10332 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit an index into .debug_addr
10333 and a 4-byte length. */
10334 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10335 "Location list start/length entry (%s)",
10336 list_head->ll_symbol);
10337 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10338 "Location list range start index (%s)",
10339 curr->begin);
10340 /* The length field is 4 bytes. If we ever need to support
10341 an 8-byte length, we can add a new DW_LLE code or fall back
10342 to DW_LLE_GNU_start_end_entry. */
10343 dw2_asm_output_delta (4, curr->end, curr->begin,
10344 "Location list range length (%s)",
10345 list_head->ll_symbol);
10346 }
10347 else if (!have_multiple_function_sections)
10348 {
10349 /* Pair of relative addresses against start of text section. */
10350 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10351 "Location list begin address (%s)",
10352 list_head->ll_symbol);
10353 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10354 "Location list end address (%s)",
10355 list_head->ll_symbol);
10356 }
10357 else
10358 {
10359 /* Pair of absolute addresses. */
10360 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10361 "Location list begin address (%s)",
10362 list_head->ll_symbol);
10363 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10364 "Location list end address (%s)",
10365 list_head->ll_symbol);
10366 }
10367
10368 /* Output the block length for this list of location operations. */
10369 if (dwarf_version >= 5)
10370 dw2_asm_output_data_uleb128 (size, "Location expression size");
10371 else
10372 {
10373 gcc_assert (size <= 0xffff);
10374 dw2_asm_output_data (2, size, "Location expression size");
10375 }
10376
10377 output_loc_sequence (curr->expr, -1);
10378 }
10379
10380 /* And finally list termination. */
10381 if (dwarf_version >= 5)
10382 dw2_asm_output_data (1, DW_LLE_end_of_list,
10383 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10384 else if (dwarf_split_debug_info)
10385 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10386 "Location list terminator (%s)",
10387 list_head->ll_symbol);
10388 else
10389 {
10390 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10391 "Location list terminator begin (%s)",
10392 list_head->ll_symbol);
10393 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10394 "Location list terminator end (%s)",
10395 list_head->ll_symbol);
10396 }
10397
10398 gcc_assert (!list_head->vl_symbol
10399 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10400 }
10401
10402 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10403 section. Emit a relocated reference if val_entry is NULL, otherwise,
10404 emit an indirect reference. */
10405
10406 static void
10407 output_range_list_offset (dw_attr_node *a)
10408 {
10409 const char *name = dwarf_attr_name (a->dw_attr);
10410
10411 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10412 {
10413 if (dwarf_version >= 5)
10414 {
10415 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10416 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10417 debug_ranges_section, "%s", name);
10418 }
10419 else
10420 {
10421 char *p = strchr (ranges_section_label, '\0');
10422 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10423 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10424 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10425 debug_ranges_section, "%s", name);
10426 *p = '\0';
10427 }
10428 }
10429 else if (dwarf_version >= 5)
10430 {
10431 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10432 gcc_assert (rnglist_idx);
10433 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10434 }
10435 else
10436 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10437 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10438 "%s (offset from %s)", name, ranges_section_label);
10439 }
10440
10441 /* Output the offset into the debug_loc section. */
10442
10443 static void
10444 output_loc_list_offset (dw_attr_node *a)
10445 {
10446 char *sym = AT_loc_list (a)->ll_symbol;
10447
10448 gcc_assert (sym);
10449 if (!dwarf_split_debug_info)
10450 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10451 "%s", dwarf_attr_name (a->dw_attr));
10452 else if (dwarf_version >= 5)
10453 {
10454 gcc_assert (AT_loc_list (a)->num_assigned);
10455 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10456 dwarf_attr_name (a->dw_attr),
10457 sym);
10458 }
10459 else
10460 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10461 "%s", dwarf_attr_name (a->dw_attr));
10462 }
10463
10464 /* Output the offset of the view list into the debug_loc section. */
10465
10466 static void
10467 output_view_list_offset (dw_attr_node *a)
10468 {
10469 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10470
10471 gcc_assert (sym);
10472 if (dwarf_split_debug_info)
10473 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10474 "%s", dwarf_attr_name (a->dw_attr));
10475 else
10476 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10477 "%s", dwarf_attr_name (a->dw_attr));
10478 }
10479
10480 /* Output an attribute's index or value appropriately. */
10481
10482 static void
10483 output_attr_index_or_value (dw_attr_node *a)
10484 {
10485 const char *name = dwarf_attr_name (a->dw_attr);
10486
10487 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10488 {
10489 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10490 return;
10491 }
10492 switch (AT_class (a))
10493 {
10494 case dw_val_class_addr:
10495 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10496 break;
10497 case dw_val_class_high_pc:
10498 case dw_val_class_lbl_id:
10499 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10500 break;
10501 default:
10502 gcc_unreachable ();
10503 }
10504 }
10505
10506 /* Output a type signature. */
10507
10508 static inline void
10509 output_signature (const char *sig, const char *name)
10510 {
10511 int i;
10512
10513 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10514 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10515 }
10516
10517 /* Output a discriminant value. */
10518
10519 static inline void
10520 output_discr_value (dw_discr_value *discr_value, const char *name)
10521 {
10522 if (discr_value->pos)
10523 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10524 else
10525 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10526 }
10527
10528 /* Output the DIE and its attributes. Called recursively to generate
10529 the definitions of each child DIE. */
10530
10531 static void
10532 output_die (dw_die_ref die)
10533 {
10534 dw_attr_node *a;
10535 dw_die_ref c;
10536 unsigned long size;
10537 unsigned ix;
10538
10539 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10540 (unsigned long)die->die_offset,
10541 dwarf_tag_name (die->die_tag));
10542
10543 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10544 {
10545 const char *name = dwarf_attr_name (a->dw_attr);
10546
10547 switch (AT_class (a))
10548 {
10549 case dw_val_class_addr:
10550 output_attr_index_or_value (a);
10551 break;
10552
10553 case dw_val_class_offset:
10554 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10555 "%s", name);
10556 break;
10557
10558 case dw_val_class_range_list:
10559 output_range_list_offset (a);
10560 break;
10561
10562 case dw_val_class_loc:
10563 size = size_of_locs (AT_loc (a));
10564
10565 /* Output the block length for this list of location operations. */
10566 if (dwarf_version >= 4)
10567 dw2_asm_output_data_uleb128 (size, "%s", name);
10568 else
10569 dw2_asm_output_data (constant_size (size), size, "%s", name);
10570
10571 output_loc_sequence (AT_loc (a), -1);
10572 break;
10573
10574 case dw_val_class_const:
10575 /* ??? It would be slightly more efficient to use a scheme like the
10576 one used for unsigned constants below, but gdb 4.x does not sign
10577 extend. Gdb 5.x does sign extend. */
10578 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10579 break;
10580
10581 case dw_val_class_unsigned_const:
10582 {
10583 int csize = constant_size (AT_unsigned (a));
10584 if (dwarf_version == 3
10585 && a->dw_attr == DW_AT_data_member_location
10586 && csize >= 4)
10587 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10588 else
10589 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10590 }
10591 break;
10592
10593 case dw_val_class_symview:
10594 {
10595 int vsize;
10596 if (symview_upper_bound <= 0xff)
10597 vsize = 1;
10598 else if (symview_upper_bound <= 0xffff)
10599 vsize = 2;
10600 else if (symview_upper_bound <= 0xffffffff)
10601 vsize = 4;
10602 else
10603 vsize = 8;
10604 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10605 "%s", name);
10606 }
10607 break;
10608
10609 case dw_val_class_const_implicit:
10610 if (flag_debug_asm)
10611 fprintf (asm_out_file, "\t\t\t%s %s ("
10612 HOST_WIDE_INT_PRINT_DEC ")\n",
10613 ASM_COMMENT_START, name, AT_int (a));
10614 break;
10615
10616 case dw_val_class_unsigned_const_implicit:
10617 if (flag_debug_asm)
10618 fprintf (asm_out_file, "\t\t\t%s %s ("
10619 HOST_WIDE_INT_PRINT_HEX ")\n",
10620 ASM_COMMENT_START, name, AT_unsigned (a));
10621 break;
10622
10623 case dw_val_class_const_double:
10624 {
10625 unsigned HOST_WIDE_INT first, second;
10626
10627 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10628 dw2_asm_output_data (1,
10629 HOST_BITS_PER_DOUBLE_INT
10630 / HOST_BITS_PER_CHAR,
10631 NULL);
10632
10633 if (WORDS_BIG_ENDIAN)
10634 {
10635 first = a->dw_attr_val.v.val_double.high;
10636 second = a->dw_attr_val.v.val_double.low;
10637 }
10638 else
10639 {
10640 first = a->dw_attr_val.v.val_double.low;
10641 second = a->dw_attr_val.v.val_double.high;
10642 }
10643
10644 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10645 first, "%s", name);
10646 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10647 second, NULL);
10648 }
10649 break;
10650
10651 case dw_val_class_wide_int:
10652 {
10653 int i;
10654 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10655 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10656 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10657 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10658 * l, NULL);
10659
10660 if (WORDS_BIG_ENDIAN)
10661 for (i = len - 1; i >= 0; --i)
10662 {
10663 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10664 "%s", name);
10665 name = "";
10666 }
10667 else
10668 for (i = 0; i < len; ++i)
10669 {
10670 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10671 "%s", name);
10672 name = "";
10673 }
10674 }
10675 break;
10676
10677 case dw_val_class_vec:
10678 {
10679 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10680 unsigned int len = a->dw_attr_val.v.val_vec.length;
10681 unsigned int i;
10682 unsigned char *p;
10683
10684 dw2_asm_output_data (constant_size (len * elt_size),
10685 len * elt_size, "%s", name);
10686 if (elt_size > sizeof (HOST_WIDE_INT))
10687 {
10688 elt_size /= 2;
10689 len *= 2;
10690 }
10691 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10692 i < len;
10693 i++, p += elt_size)
10694 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10695 "fp or vector constant word %u", i);
10696 break;
10697 }
10698
10699 case dw_val_class_flag:
10700 if (dwarf_version >= 4)
10701 {
10702 /* Currently all add_AT_flag calls pass in 1 as last argument,
10703 so DW_FORM_flag_present can be used. If that ever changes,
10704 we'll need to use DW_FORM_flag and have some optimization
10705 in build_abbrev_table that will change those to
10706 DW_FORM_flag_present if it is set to 1 in all DIEs using
10707 the same abbrev entry. */
10708 gcc_assert (AT_flag (a) == 1);
10709 if (flag_debug_asm)
10710 fprintf (asm_out_file, "\t\t\t%s %s\n",
10711 ASM_COMMENT_START, name);
10712 break;
10713 }
10714 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10715 break;
10716
10717 case dw_val_class_loc_list:
10718 output_loc_list_offset (a);
10719 break;
10720
10721 case dw_val_class_view_list:
10722 output_view_list_offset (a);
10723 break;
10724
10725 case dw_val_class_die_ref:
10726 if (AT_ref_external (a))
10727 {
10728 if (AT_ref (a)->comdat_type_p)
10729 {
10730 comdat_type_node *type_node
10731 = AT_ref (a)->die_id.die_type_node;
10732
10733 gcc_assert (type_node);
10734 output_signature (type_node->signature, name);
10735 }
10736 else
10737 {
10738 const char *sym = AT_ref (a)->die_id.die_symbol;
10739 int size;
10740
10741 gcc_assert (sym);
10742 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10743 length, whereas in DWARF3 it's always sized as an
10744 offset. */
10745 if (dwarf_version == 2)
10746 size = DWARF2_ADDR_SIZE;
10747 else
10748 size = DWARF_OFFSET_SIZE;
10749 /* ??? We cannot unconditionally output die_offset if
10750 non-zero - others might create references to those
10751 DIEs via symbols.
10752 And we do not clear its DIE offset after outputting it
10753 (and the label refers to the actual DIEs, not to the
10754 DWARF CU unit header, which is the case in which using
10755 label + offset would be the correct thing to do).
10756 ??? This is the reason for the with_offset flag. */
10757 if (AT_ref (a)->with_offset)
10758 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10759 debug_info_section, "%s", name);
10760 else
10761 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10762 name);
10763 }
10764 }
10765 else
10766 {
10767 gcc_assert (AT_ref (a)->die_offset);
10768 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10769 "%s", name);
10770 }
10771 break;
10772
10773 case dw_val_class_fde_ref:
10774 {
10775 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10776
10777 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10778 a->dw_attr_val.v.val_fde_index * 2);
10779 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10780 "%s", name);
10781 }
10782 break;
10783
10784 case dw_val_class_vms_delta:
10785 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10786 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10787 AT_vms_delta2 (a), AT_vms_delta1 (a),
10788 "%s", name);
10789 #else
10790 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10791 AT_vms_delta2 (a), AT_vms_delta1 (a),
10792 "%s", name);
10793 #endif
10794 break;
10795
10796 case dw_val_class_lbl_id:
10797 output_attr_index_or_value (a);
10798 break;
10799
10800 case dw_val_class_lineptr:
10801 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10802 debug_line_section, "%s", name);
10803 break;
10804
10805 case dw_val_class_macptr:
10806 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10807 debug_macinfo_section, "%s", name);
10808 break;
10809
10810 case dw_val_class_loclistsptr:
10811 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10812 debug_loc_section, "%s", name);
10813 break;
10814
10815 case dw_val_class_str:
10816 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10817 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10818 a->dw_attr_val.v.val_str->label,
10819 debug_str_section,
10820 "%s: \"%s\"", name, AT_string (a));
10821 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10822 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10823 a->dw_attr_val.v.val_str->label,
10824 debug_line_str_section,
10825 "%s: \"%s\"", name, AT_string (a));
10826 else if (a->dw_attr_val.v.val_str->form == DW_FORM_GNU_str_index)
10827 dw2_asm_output_data_uleb128 (AT_index (a),
10828 "%s: \"%s\"", name, AT_string (a));
10829 else
10830 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10831 break;
10832
10833 case dw_val_class_file:
10834 {
10835 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10836
10837 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10838 a->dw_attr_val.v.val_file->filename);
10839 break;
10840 }
10841
10842 case dw_val_class_file_implicit:
10843 if (flag_debug_asm)
10844 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10845 ASM_COMMENT_START, name,
10846 maybe_emit_file (a->dw_attr_val.v.val_file),
10847 a->dw_attr_val.v.val_file->filename);
10848 break;
10849
10850 case dw_val_class_data8:
10851 {
10852 int i;
10853
10854 for (i = 0; i < 8; i++)
10855 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10856 i == 0 ? "%s" : NULL, name);
10857 break;
10858 }
10859
10860 case dw_val_class_high_pc:
10861 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10862 get_AT_low_pc (die), "DW_AT_high_pc");
10863 break;
10864
10865 case dw_val_class_discr_value:
10866 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10867 break;
10868
10869 case dw_val_class_discr_list:
10870 {
10871 dw_discr_list_ref list = AT_discr_list (a);
10872 const int size = size_of_discr_list (list);
10873
10874 /* This is a block, so output its length first. */
10875 dw2_asm_output_data (constant_size (size), size,
10876 "%s: block size", name);
10877
10878 for (; list != NULL; list = list->dw_discr_next)
10879 {
10880 /* One byte for the discriminant value descriptor, and then as
10881 many LEB128 numbers as required. */
10882 if (list->dw_discr_range)
10883 dw2_asm_output_data (1, DW_DSC_range,
10884 "%s: DW_DSC_range", name);
10885 else
10886 dw2_asm_output_data (1, DW_DSC_label,
10887 "%s: DW_DSC_label", name);
10888
10889 output_discr_value (&list->dw_discr_lower_bound, name);
10890 if (list->dw_discr_range)
10891 output_discr_value (&list->dw_discr_upper_bound, name);
10892 }
10893 break;
10894 }
10895
10896 default:
10897 gcc_unreachable ();
10898 }
10899 }
10900
10901 FOR_EACH_CHILD (die, c, output_die (c));
10902
10903 /* Add null byte to terminate sibling list. */
10904 if (die->die_child != NULL)
10905 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10906 (unsigned long) die->die_offset);
10907 }
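
/* A minimal sketch, not GCC's helper, of the sizing rule that the
   constant_size calls in output_die rely on: use the smallest of 1, 2,
   4 or 8 bytes that can represent VALUE.  The name sketch_constant_size
   is purely illustrative.  */

static int
sketch_constant_size (unsigned long long value)
{
  if (value <= 0xff)
    return 1;			/* fits in one byte */
  if (value <= 0xffff)
    return 2;			/* fits in two bytes */
  if (value <= 0xffffffffULL)
    return 4;			/* fits in four bytes */
  return 8;			/* needs eight bytes */
}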
10908
10909 /* Output the dwarf version number. */
10910
10911 static void
10912 output_dwarf_version ()
10913 {
10914 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10915 views in loclist. That will change eventually. */
10916 if (dwarf_version == 6)
10917 {
10918 static bool once;
10919 if (!once)
10920 {
10921 warning (0,
10922 "-gdwarf-6 is output as version 5 with incompatibilities");
10923 once = true;
10924 }
10925 dw2_asm_output_data (2, 5, "DWARF version number");
10926 }
10927 else
10928 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10929 }
10930
10931 /* Output the compilation unit that appears at the beginning of the
10932 .debug_info section, and precedes the DIE descriptions. */
10933
10934 static void
10935 output_compilation_unit_header (enum dwarf_unit_type ut)
10936 {
10937 if (!XCOFF_DEBUGGING_INFO)
10938 {
10939 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10940 dw2_asm_output_data (4, 0xffffffff,
10941 "Initial length escape value indicating 64-bit DWARF extension");
10942 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10943 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10944 "Length of Compilation Unit Info");
10945 }
10946
10947 output_dwarf_version ();
10948 if (dwarf_version >= 5)
10949 {
10950 const char *name;
10951 switch (ut)
10952 {
10953 case DW_UT_compile: name = "DW_UT_compile"; break;
10954 case DW_UT_type: name = "DW_UT_type"; break;
10955 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10956 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10957 default: gcc_unreachable ();
10958 }
10959 dw2_asm_output_data (1, ut, "%s", name);
10960 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10961 }
10962 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10963 debug_abbrev_section,
10964 "Offset Into Abbrev. Section");
10965 if (dwarf_version < 5)
10966 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10967 }
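
/* For reference, a purely illustrative sketch (not used by GCC) of the
   fields the function above emits for a 32-bit DWARF 5 compile unit;
   in the section they are packed back to back, the struct only names
   them.  For DWARF 2-4 the unit_type byte is absent and the address
   size byte follows the abbrev offset instead.  */

struct sketch_dwarf5_cu_header
{
  unsigned int unit_length;		/* length of the unit after this field */
  unsigned short version;		/* 5 */
  unsigned char unit_type;		/* DW_UT_compile, DW_UT_split_compile, ... */
  unsigned char address_size;		/* DWARF2_ADDR_SIZE */
  unsigned int debug_abbrev_offset;	/* offset into .debug_abbrev */
};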
10968
10969 /* Output the compilation unit DIE and its children. */
10970
10971 static void
10972 output_comp_unit (dw_die_ref die, int output_if_empty,
10973 const unsigned char *dwo_id)
10974 {
10975 const char *secname, *oldsym;
10976 char *tmp;
10977
10978 /* Unless we are outputting the main CU, we may throw away empty ones. */
10979 if (!output_if_empty && die->die_child == NULL)
10980 return;
10981
10982 /* Even if there are no children of this DIE, we must output the information
10983 about the compilation unit. Otherwise, on an empty translation unit, we
10984 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10985 will then complain when examining the file. First mark all the DIEs in
10986 this CU so we know which get local refs. */
10987 mark_dies (die);
10988
10989 external_ref_hash_type *extern_map = optimize_external_refs (die);
10990
10991 /* For now, optimize only the main CU; to optimize the rest
10992 we'd need to see all of them earlier. Leave the rest for post-linking
10993 tools like DWZ. */
10994 if (die == comp_unit_die ())
10995 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10996
10997 build_abbrev_table (die, extern_map);
10998
10999 optimize_abbrev_table ();
11000
11001 delete extern_map;
11002
11003 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11004 next_die_offset = (dwo_id
11005 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11006 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11007 calc_die_sizes (die);
11008
11009 oldsym = die->die_id.die_symbol;
11010 if (oldsym && die->comdat_type_p)
11011 {
11012 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11013
11014 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11015 secname = tmp;
11016 die->die_id.die_symbol = NULL;
11017 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11018 }
11019 else
11020 {
11021 switch_to_section (debug_info_section);
11022 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11023 info_section_emitted = true;
11024 }
11025
11026 /* For LTO cross unit DIE refs we want a symbol at the start of the
11027 debuginfo section, not on the CU DIE. */
11028 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11029 {
11030 /* ??? No way to get visibility assembled without a decl. */
11031 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11032 get_identifier (oldsym), char_type_node);
11033 TREE_PUBLIC (decl) = true;
11034 TREE_STATIC (decl) = true;
11035 DECL_ARTIFICIAL (decl) = true;
11036 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11037 DECL_VISIBILITY_SPECIFIED (decl) = true;
11038 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11039 #ifdef ASM_WEAKEN_LABEL
11040 /* We prefer a .weak because that handles duplicates from duplicate
11041 archive members in a graceful way. */
11042 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11043 #else
11044 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11045 #endif
11046 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11047 }
11048
11049 /* Output debugging information. */
11050 output_compilation_unit_header (dwo_id
11051 ? DW_UT_split_compile : DW_UT_compile);
11052 if (dwarf_version >= 5)
11053 {
11054 if (dwo_id != NULL)
11055 for (int i = 0; i < 8; i++)
11056 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11057 }
11058 output_die (die);
11059
11060 /* Leave the marks on the main CU, so we can check them in
11061 output_pubnames. */
11062 if (oldsym)
11063 {
11064 unmark_dies (die);
11065 die->die_id.die_symbol = oldsym;
11066 }
11067 }
11068
11069 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11070 and .debug_pubtypes. This is configured per-target, but can be
11071 overridden by the -gpubnames or -gno-pubnames options. */
11072
11073 static inline bool
11074 want_pubnames (void)
11075 {
11076 if (debug_info_level <= DINFO_LEVEL_TERSE)
11077 return false;
11078 if (debug_generate_pub_sections != -1)
11079 return debug_generate_pub_sections;
11080 return targetm.want_debug_pub_sections;
11081 }
11082
11083 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11084
11085 static void
11086 add_AT_pubnames (dw_die_ref die)
11087 {
11088 if (want_pubnames ())
11089 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11090 }
11091
11092 /* Add a string attribute value to a skeleton DIE. */
11093
11094 static inline void
11095 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11096 const char *str)
11097 {
11098 dw_attr_node attr;
11099 struct indirect_string_node *node;
11100
11101 if (! skeleton_debug_str_hash)
11102 skeleton_debug_str_hash
11103 = hash_table<indirect_string_hasher>::create_ggc (10);
11104
11105 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11106 find_string_form (node);
11107 if (node->form == DW_FORM_GNU_str_index)
11108 node->form = DW_FORM_strp;
11109
11110 attr.dw_attr = attr_kind;
11111 attr.dw_attr_val.val_class = dw_val_class_str;
11112 attr.dw_attr_val.val_entry = NULL;
11113 attr.dw_attr_val.v.val_str = node;
11114 add_dwarf_attr (die, &attr);
11115 }
11116
11117 /* Helper function to generate top-level dies for skeleton debug_info and
11118 debug_types. */
11119
11120 static void
11121 add_top_level_skeleton_die_attrs (dw_die_ref die)
11122 {
11123 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11124 const char *comp_dir = comp_dir_string ();
11125
11126 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11127 if (comp_dir != NULL)
11128 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11129 add_AT_pubnames (die);
11130 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11131 }
11132
11133 /* Output skeleton debug sections that point to the dwo file. */
11134
11135 static void
11136 output_skeleton_debug_sections (dw_die_ref comp_unit,
11137 const unsigned char *dwo_id)
11138 {
11139 /* These attributes will be found in the full debug_info section. */
11140 remove_AT (comp_unit, DW_AT_producer);
11141 remove_AT (comp_unit, DW_AT_language);
11142
11143 switch_to_section (debug_skeleton_info_section);
11144 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11145
11146 /* Produce the skeleton compilation-unit header. This one differs enough
11147 from a normal CU header that it's better not to call
11148 output_compilation_unit_header. */
11149 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11150 dw2_asm_output_data (4, 0xffffffff,
11151 "Initial length escape value indicating 64-bit "
11152 "DWARF extension");
11153
11154 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11155 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11156 - DWARF_INITIAL_LENGTH_SIZE
11157 + size_of_die (comp_unit),
11158 "Length of Compilation Unit Info");
11159 output_dwarf_version ();
11160 if (dwarf_version >= 5)
11161 {
11162 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11163 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11164 }
11165 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11166 debug_skeleton_abbrev_section,
11167 "Offset Into Abbrev. Section");
11168 if (dwarf_version < 5)
11169 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11170 else
11171 for (int i = 0; i < 8; i++)
11172 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11173
11174 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11175 output_die (comp_unit);
11176
11177 /* Build the skeleton debug_abbrev section. */
11178 switch_to_section (debug_skeleton_abbrev_section);
11179 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11180
11181 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11182
11183 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11184 }
11185
11186 /* Output a comdat type unit DIE and its children. */
11187
11188 static void
11189 output_comdat_type_unit (comdat_type_node *node)
11190 {
11191 const char *secname;
11192 char *tmp;
11193 int i;
11194 #if defined (OBJECT_FORMAT_ELF)
11195 tree comdat_key;
11196 #endif
11197
11198 /* First mark all the DIEs in this CU so we know which get local refs. */
11199 mark_dies (node->root_die);
11200
11201 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11202
11203 build_abbrev_table (node->root_die, extern_map);
11204
11205 delete extern_map;
11206 extern_map = NULL;
11207
11208 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11209 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11210 calc_die_sizes (node->root_die);
11211
11212 #if defined (OBJECT_FORMAT_ELF)
11213 if (dwarf_version >= 5)
11214 {
11215 if (!dwarf_split_debug_info)
11216 secname = ".debug_info";
11217 else
11218 secname = ".debug_info.dwo";
11219 }
11220 else if (!dwarf_split_debug_info)
11221 secname = ".debug_types";
11222 else
11223 secname = ".debug_types.dwo";
11224
11225 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11226 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11227 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11228 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11229 comdat_key = get_identifier (tmp);
11230 targetm.asm_out.named_section (secname,
11231 SECTION_DEBUG | SECTION_LINKONCE,
11232 comdat_key);
11233 #else
11234 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11235 sprintf (tmp, (dwarf_version >= 5
11236 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11237 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11238 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11239 secname = tmp;
11240 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11241 #endif
11242
11243 /* Output debugging information. */
11244 output_compilation_unit_header (dwarf_split_debug_info
11245 ? DW_UT_split_type : DW_UT_type);
11246 output_signature (node->signature, "Type Signature");
11247 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11248 "Offset to Type DIE");
11249 output_die (node->root_die);
11250
11251 unmark_dies (node->root_die);
11252 }
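
/* Likewise purely illustrative: the 32-bit DWARF 5 type unit header
   that output_compilation_unit_header, output_signature and the
   "Offset to Type DIE" datum above add up to.  The section bytes are
   packed; the struct only names the fields.  */

struct sketch_dwarf5_tu_header
{
  unsigned int unit_length;		/* length of the unit after this field */
  unsigned short version;		/* 5 */
  unsigned char unit_type;		/* DW_UT_type or DW_UT_split_type */
  unsigned char address_size;		/* DWARF2_ADDR_SIZE */
  unsigned int debug_abbrev_offset;	/* offset into .debug_abbrev */
  unsigned char type_signature[8];	/* 64-bit type signature */
  unsigned int type_offset;		/* offset of the type DIE within the unit */
};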
11253
11254 /* Return the DWARF2/3 pubname associated with a decl. */
11255
11256 static const char *
11257 dwarf2_name (tree decl, int scope)
11258 {
11259 if (DECL_NAMELESS (decl))
11260 return NULL;
11261 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11262 }
11263
11264 /* Add a new entry to .debug_pubnames if appropriate. */
11265
11266 static void
11267 add_pubname_string (const char *str, dw_die_ref die)
11268 {
11269 pubname_entry e;
11270
11271 e.die = die;
11272 e.name = xstrdup (str);
11273 vec_safe_push (pubname_table, e);
11274 }
11275
11276 static void
11277 add_pubname (tree decl, dw_die_ref die)
11278 {
11279 if (!want_pubnames ())
11280 return;
11281
11282 /* Don't add items to the table when we expect that the consumer will have
11283 just read the enclosing die. For example, if the consumer is looking at a
11284 class_member, it will either be inside the class already, or will have just
11285 looked up the class to find the member. Either way, searching the class is
11286 faster than searching the index. */
11287 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11288 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11289 {
11290 const char *name = dwarf2_name (decl, 1);
11291
11292 if (name)
11293 add_pubname_string (name, die);
11294 }
11295 }
11296
11297 /* Add an enumerator to the pubnames section. */
11298
11299 static void
11300 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11301 {
11302 pubname_entry e;
11303
11304 gcc_assert (scope_name);
11305 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11306 e.die = die;
11307 vec_safe_push (pubname_table, e);
11308 }
11309
11310 /* Add a new entry to .debug_pubtypes if appropriate. */
11311
11312 static void
11313 add_pubtype (tree decl, dw_die_ref die)
11314 {
11315 pubname_entry e;
11316
11317 if (!want_pubnames ())
11318 return;
11319
11320 if ((TREE_PUBLIC (decl)
11321 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11322 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11323 {
11324 tree scope = NULL;
11325 const char *scope_name = "";
11326 const char *sep = is_cxx () ? "::" : ".";
11327 const char *name;
11328
11329 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11330 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11331 {
11332 scope_name = lang_hooks.dwarf_name (scope, 1);
11333 if (scope_name != NULL && scope_name[0] != '\0')
11334 scope_name = concat (scope_name, sep, NULL);
11335 else
11336 scope_name = "";
11337 }
11338
11339 if (TYPE_P (decl))
11340 name = type_tag (decl);
11341 else
11342 name = lang_hooks.dwarf_name (decl, 1);
11343
11344 /* If we don't have a name for the type, there's no point in adding
11345 it to the table. */
11346 if (name != NULL && name[0] != '\0')
11347 {
11348 e.die = die;
11349 e.name = concat (scope_name, name, NULL);
11350 vec_safe_push (pubtype_table, e);
11351 }
11352
11353 /* Although it might be more consistent to add the pubinfo for the
11354 enumerators as their dies are created, they should only be added if the
11355 enum type meets the criteria above. So rather than re-check the parent
11356 enum type whenever an enumerator die is created, just output them all
11357 here. This isn't protected by the name conditional because anonymous
11358 enums don't have names. */
11359 if (die->die_tag == DW_TAG_enumeration_type)
11360 {
11361 dw_die_ref c;
11362
11363 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11364 }
11365 }
11366 }
11367
11368 /* Output a single entry in the pubnames table. */
11369
11370 static void
11371 output_pubname (dw_offset die_offset, pubname_entry *entry)
11372 {
11373 dw_die_ref die = entry->die;
11374 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11375
11376 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11377
11378 if (debug_generate_pub_sections == 2)
11379 {
11380 /* This logic follows gdb's method for determining the value of the flag
11381 byte. */
11382 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11383 switch (die->die_tag)
11384 {
11385 case DW_TAG_typedef:
11386 case DW_TAG_base_type:
11387 case DW_TAG_subrange_type:
11388 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11389 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11390 break;
11391 case DW_TAG_enumerator:
11392 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11393 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11394 if (!is_cxx ())
11395 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11396 break;
11397 case DW_TAG_subprogram:
11398 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11399 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11400 if (!is_ada ())
11401 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11402 break;
11403 case DW_TAG_constant:
11404 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11405 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11406 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11407 break;
11408 case DW_TAG_variable:
11409 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11410 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11411 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11412 break;
11413 case DW_TAG_namespace:
11414 case DW_TAG_imported_declaration:
11415 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11416 break;
11417 case DW_TAG_class_type:
11418 case DW_TAG_interface_type:
11419 case DW_TAG_structure_type:
11420 case DW_TAG_union_type:
11421 case DW_TAG_enumeration_type:
11422 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11423 if (!is_cxx ())
11424 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11425 break;
11426 default:
11427 /* An unusual tag. Leave the flag-byte empty. */
11428 break;
11429 }
11430 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11431 "GDB-index flags");
11432 }
11433
11434 dw2_asm_output_nstring (entry->name, -1, "external name");
11435 }
11436
11437
11438 /* Output the public names table used to speed up access to externally
11439 visible names; or the public types table used to find type definitions. */
11440
11441 static void
11442 output_pubnames (vec<pubname_entry, va_gc> *names)
11443 {
11444 unsigned i;
11445 unsigned long pubnames_length = size_of_pubnames (names);
11446 pubname_entry *pub;
11447
11448 if (!XCOFF_DEBUGGING_INFO)
11449 {
11450 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11451 dw2_asm_output_data (4, 0xffffffff,
11452 "Initial length escape value indicating 64-bit DWARF extension");
11453 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11454 "Pub Info Length");
11455 }
11456
11457 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11458 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11459
11460 if (dwarf_split_debug_info)
11461 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11462 debug_skeleton_info_section,
11463 "Offset of Compilation Unit Info");
11464 else
11465 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11466 debug_info_section,
11467 "Offset of Compilation Unit Info");
11468 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11469 "Compilation Unit Length");
11470
11471 FOR_EACH_VEC_ELT (*names, i, pub)
11472 {
11473 if (include_pubname_in_output (names, pub))
11474 {
11475 dw_offset die_offset = pub->die->die_offset;
11476
11477 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11478 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11479 gcc_assert (pub->die->die_mark);
11480
11481 /* If we're putting types in their own .debug_types sections,
11482 the .debug_pubtypes table will still point to the compile
11483 unit (not the type unit), so we want to use the offset of
11484 the skeleton DIE (if there is one). */
11485 if (pub->die->comdat_type_p && names == pubtype_table)
11486 {
11487 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11488
11489 if (type_node != NULL)
11490 die_offset = (type_node->skeleton_die != NULL
11491 ? type_node->skeleton_die->die_offset
11492 : comp_unit_die ()->die_offset);
11493 }
11494
11495 output_pubname (die_offset, pub);
11496 }
11497 }
11498
11499 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11500 }
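
/* A hypothetical consumer-side sketch, not part of GCC, of walking the
   table emitted above in its traditional layout: 4-byte DIE offsets
   each followed by a NUL-terminated name, terminated by a zero offset.
   It assumes a little-endian section and ignores the optional GNU
   flag byte emitted when debug_generate_pub_sections == 2.  */

static unsigned int
sketch_count_pubnames (const unsigned char *p)
{
  unsigned int count = 0;
  for (;;)
    {
      unsigned int off = p[0] | (p[1] << 8) | (p[2] << 16)
			 | ((unsigned int) p[3] << 24);
      p += 4;
      if (off == 0)
	break;					/* terminating entry */
      p += strlen ((const char *) p) + 1;	/* skip the name */
      count++;
    }
  return count;
}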
11501
11502 /* Output public names and types tables if necessary. */
11503
11504 static void
11505 output_pubtables (void)
11506 {
11507 if (!want_pubnames () || !info_section_emitted)
11508 return;
11509
11510 switch_to_section (debug_pubnames_section);
11511 output_pubnames (pubname_table);
11512 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11513 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11514 simply won't look for the section. */
11515 switch_to_section (debug_pubtypes_section);
11516 output_pubnames (pubtype_table);
11517 }
11518
11519
11520 /* Output the information that goes into the .debug_aranges table.
11521 Namely, define the beginning and ending address range of the
11522 text section generated for this compilation unit. */
11523
11524 static void
11525 output_aranges (void)
11526 {
11527 unsigned i;
11528 unsigned long aranges_length = size_of_aranges ();
11529
11530 if (!XCOFF_DEBUGGING_INFO)
11531 {
11532 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11533 dw2_asm_output_data (4, 0xffffffff,
11534 "Initial length escape value indicating 64-bit DWARF extension");
11535 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11536 "Length of Address Ranges Info");
11537 }
11538
11539 /* Version number for aranges is still 2, even up to DWARF5. */
11540 dw2_asm_output_data (2, 2, "DWARF aranges version");
11541 if (dwarf_split_debug_info)
11542 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11543 debug_skeleton_info_section,
11544 "Offset of Compilation Unit Info");
11545 else
11546 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11547 debug_info_section,
11548 "Offset of Compilation Unit Info");
11549 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11550 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11551
11552 /* We need to align to twice the pointer size here. */
11553 if (DWARF_ARANGES_PAD_SIZE)
11554 {
11555 /* Pad using 2 byte words so that the padding is correct for any
11556 pointer size. */
11557 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11558 2 * DWARF2_ADDR_SIZE);
11559 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11560 dw2_asm_output_data (2, 0, NULL);
11561 }
11562
11563 /* We must not output these entries if the sections were not used;
11564 in that case the length will be 0 and the address may end up
11565 as 0 if the section is discarded by ld
11566 --gc-sections, leaving an invalid (0, 0) entry that can be
11567 confused with the terminator. */
11568 if (text_section_used)
11569 {
11570 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11571 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11572 text_section_label, "Length");
11573 }
11574 if (cold_text_section_used)
11575 {
11576 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11577 "Address");
11578 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11579 cold_text_section_label, "Length");
11580 }
11581
11582 if (have_multiple_function_sections)
11583 {
11584 unsigned fde_idx;
11585 dw_fde_ref fde;
11586
11587 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11588 {
11589 if (DECL_IGNORED_P (fde->decl))
11590 continue;
11591 if (!fde->in_std_section)
11592 {
11593 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11594 "Address");
11595 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11596 fde->dw_fde_begin, "Length");
11597 }
11598 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11599 {
11600 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11601 "Address");
11602 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11603 fde->dw_fde_second_begin, "Length");
11604 }
11605 }
11606 }
11607
11608 /* Output the terminator words. */
11609 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11610 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11611 }
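
/* A minimal sketch, not the DWARF_ARANGES_PAD_SIZE macro itself, of the
   padding rule used above: the (address, length) tuples must start at a
   multiple of twice the address size, counted from the start of the
   aranges contribution.  HEADER_SIZE stands for the bytes already
   emitted (initial length, version, CU offset, address size and segment
   size bytes).  */

static int
sketch_aranges_pad (int header_size, int addr_size)
{
  int align = 2 * addr_size;
  return (align - header_size % align) % align;	/* zero-filled pad bytes */
}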
11612
11613 /* Add a new entry to .debug_ranges. Return its index into
11614 ranges_table vector. */
11615
11616 static unsigned int
11617 add_ranges_num (int num, bool maybe_new_sec)
11618 {
11619 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11620 vec_safe_push (ranges_table, r);
11621 return vec_safe_length (ranges_table) - 1;
11622 }
11623
11624 /* Add a new entry to .debug_ranges corresponding to a block, or a
11625 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11626 this entry might be in a different section from previous range. */
11627
11628 static unsigned int
11629 add_ranges (const_tree block, bool maybe_new_sec)
11630 {
11631 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11632 }
11633
11634 /* Note that (*rnglist_table)[offset] is either a head of a rnglist
11635 chain, or middle entry of a chain that will be directly referred to. */
11636
11637 static void
11638 note_rnglist_head (unsigned int offset)
11639 {
11640 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11641 return;
11642 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11643 }
11644
11645 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11646 When using dwarf_split_debug_info, address attributes in dies destined
11647 for the final executable should be direct references--setting the
11648 parameter force_direct ensures this behavior. */
11649
11650 static void
11651 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11652 bool *added, bool force_direct)
11653 {
11654 unsigned int in_use = vec_safe_length (ranges_by_label);
11655 unsigned int offset;
11656 dw_ranges_by_label rbl = { begin, end };
11657 vec_safe_push (ranges_by_label, rbl);
11658 offset = add_ranges_num (-(int)in_use - 1, true);
11659 if (!*added)
11660 {
11661 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11662 *added = true;
11663 note_rnglist_head (offset);
11664 }
11665 }
11666
11667 /* Emit .debug_ranges section. */
11668
11669 static void
11670 output_ranges (void)
11671 {
11672 unsigned i;
11673 static const char *const start_fmt = "Offset %#x";
11674 const char *fmt = start_fmt;
11675 dw_ranges *r;
11676
11677 switch_to_section (debug_ranges_section);
11678 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11679 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11680 {
11681 int block_num = r->num;
11682
11683 if (block_num > 0)
11684 {
11685 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11686 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11687
11688 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11689 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11690
11691 /* If all code is in the text section, then the compilation
11692 unit base address defaults to DW_AT_low_pc, which is the
11693 base of the text section. */
11694 if (!have_multiple_function_sections)
11695 {
11696 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11697 text_section_label,
11698 fmt, i * 2 * DWARF2_ADDR_SIZE);
11699 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11700 text_section_label, NULL);
11701 }
11702
11703 /* Otherwise, the compilation unit base address is zero,
11704 which allows us to use absolute addresses, and not worry
11705 about whether the target supports cross-section
11706 arithmetic. */
11707 else
11708 {
11709 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11710 fmt, i * 2 * DWARF2_ADDR_SIZE);
11711 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11712 }
11713
11714 fmt = NULL;
11715 }
11716
11717 /* Negative block_num stands for an index into ranges_by_label. */
11718 else if (block_num < 0)
11719 {
11720 int lab_idx = - block_num - 1;
11721
11722 if (!have_multiple_function_sections)
11723 {
11724 gcc_unreachable ();
11725 #if 0
11726 /* If we ever use add_ranges_by_labels () for a single
11727 function section, all we have to do is to take out
11728 the #if 0 above. */
11729 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11730 (*ranges_by_label)[lab_idx].begin,
11731 text_section_label,
11732 fmt, i * 2 * DWARF2_ADDR_SIZE);
11733 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11734 (*ranges_by_label)[lab_idx].end,
11735 text_section_label, NULL);
11736 #endif
11737 }
11738 else
11739 {
11740 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11741 (*ranges_by_label)[lab_idx].begin,
11742 fmt, i * 2 * DWARF2_ADDR_SIZE);
11743 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11744 (*ranges_by_label)[lab_idx].end,
11745 NULL);
11746 }
11747 }
11748 else
11749 {
11750 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11751 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11752 fmt = start_fmt;
11753 }
11754 }
11755 }
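
/* A hypothetical consumer-side sketch, not part of GCC, of how the
   entries emitted above read back: each is a (begin, end) pair of
   address-sized values relative to the applicable base address, and a
   (0, 0) pair ends the list.  Base address selection entries (begin ==
   all ones) are ignored here for brevity.  */

struct sketch_range_pair { unsigned long long begin, end; };

static unsigned long long
sketch_ranges_total_length (const struct sketch_range_pair *r)
{
  unsigned long long total = 0;
  for (; r->begin != 0 || r->end != 0; r++)
    total += r->end - r->begin;		/* length covered by this pair */
  return total;
}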
11756
11757 /* Non-zero if .debug_line_str should be used for .debug_line section
11758 strings or strings that are likely shareable with those. */
11759 #define DWARF5_USE_DEBUG_LINE_STR \
11760 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11761 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11762 /* FIXME: there is no .debug_line_str.dwo section, \
11763 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11764 && !dwarf_split_debug_info)
11765
11766 /* Assign .debug_rnglists indexes. */
11767
11768 static void
11769 index_rnglists (void)
11770 {
11771 unsigned i;
11772 dw_ranges *r;
11773
11774 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11775 if (r->label)
11776 r->idx = rnglist_idx++;
11777 }
11778
11779 /* Emit .debug_rnglists section. */
11780
11781 static void
11782 output_rnglists (unsigned generation)
11783 {
11784 unsigned i;
11785 dw_ranges *r;
11786 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11787 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11788 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11789
11790 switch_to_section (debug_ranges_section);
11791 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11792 /* There are up to 4 unique ranges labels per generation.
11793 See also init_sections_and_labels. */
11794 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11795 2 + generation * 4);
11796 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11797 3 + generation * 4);
11798 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11799 dw2_asm_output_data (4, 0xffffffff,
11800 "Initial length escape value indicating "
11801 "64-bit DWARF extension");
11802 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11803 "Length of Range Lists");
11804 ASM_OUTPUT_LABEL (asm_out_file, l1);
11805 output_dwarf_version ();
11806 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11807 dw2_asm_output_data (1, 0, "Segment Size");
11808 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11809 about relocation sizes and primarily care about the size of .debug*
11810 sections in linked shared libraries and executables, then
11811 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11812 into it are usually larger than just DW_FORM_sec_offset offsets
11813 into the .debug_rnglists section. */
11814 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11815 "Offset Entry Count");
11816 if (dwarf_split_debug_info)
11817 {
11818 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11819 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11820 if (r->label)
11821 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11822 ranges_base_label, NULL);
11823 }
11824
11825 const char *lab = "";
11826 unsigned int len = vec_safe_length (ranges_table);
11827 const char *base = NULL;
11828 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11829 {
11830 int block_num = r->num;
11831
11832 if (r->label)
11833 {
11834 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11835 lab = r->label;
11836 }
11837 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11838 base = NULL;
11839 if (block_num > 0)
11840 {
11841 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11842 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11843
11844 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11845 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11846
11847 if (HAVE_AS_LEB128)
11848 {
11849 /* If all code is in the text section, then the compilation
11850 unit base address defaults to DW_AT_low_pc, which is the
11851 base of the text section. */
11852 if (!have_multiple_function_sections)
11853 {
11854 dw2_asm_output_data (1, DW_RLE_offset_pair,
11855 "DW_RLE_offset_pair (%s)", lab);
11856 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11857 "Range begin address (%s)", lab);
11858 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11859 "Range end address (%s)", lab);
11860 continue;
11861 }
11862 if (base == NULL)
11863 {
11864 dw_ranges *r2 = NULL;
11865 if (i < len - 1)
11866 r2 = &(*ranges_table)[i + 1];
11867 if (r2
11868 && r2->num != 0
11869 && r2->label == NULL
11870 && !r2->maybe_new_sec)
11871 {
11872 dw2_asm_output_data (1, DW_RLE_base_address,
11873 "DW_RLE_base_address (%s)", lab);
11874 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11875 "Base address (%s)", lab);
11876 strcpy (basebuf, blabel);
11877 base = basebuf;
11878 }
11879 }
11880 if (base)
11881 {
11882 dw2_asm_output_data (1, DW_RLE_offset_pair,
11883 "DW_RLE_offset_pair (%s)", lab);
11884 dw2_asm_output_delta_uleb128 (blabel, base,
11885 "Range begin address (%s)", lab);
11886 dw2_asm_output_delta_uleb128 (elabel, base,
11887 "Range end address (%s)", lab);
11888 continue;
11889 }
11890 dw2_asm_output_data (1, DW_RLE_start_length,
11891 "DW_RLE_start_length (%s)", lab);
11892 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11893 "Range begin address (%s)", lab);
11894 dw2_asm_output_delta_uleb128 (elabel, blabel,
11895 "Range length (%s)", lab);
11896 }
11897 else
11898 {
11899 dw2_asm_output_data (1, DW_RLE_start_end,
11900 "DW_RLE_start_end (%s)", lab);
11901 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11902 "Range begin address (%s)", lab);
11903 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11904 "Range end address (%s)", lab);
11905 }
11906 }
11907
11908 /* Negative block_num stands for an index into ranges_by_label. */
11909 else if (block_num < 0)
11910 {
11911 int lab_idx = - block_num - 1;
11912 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11913 const char *elabel = (*ranges_by_label)[lab_idx].end;
11914
11915 if (!have_multiple_function_sections)
11916 gcc_unreachable ();
11917 if (HAVE_AS_LEB128)
11918 {
11919 dw2_asm_output_data (1, DW_RLE_start_length,
11920 "DW_RLE_start_length (%s)", lab);
11921 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11922 "Range begin address (%s)", lab);
11923 dw2_asm_output_delta_uleb128 (elabel, blabel,
11924 "Range length (%s)", lab);
11925 }
11926 else
11927 {
11928 dw2_asm_output_data (1, DW_RLE_start_end,
11929 "DW_RLE_start_end (%s)", lab);
11930 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11931 "Range begin address (%s)", lab);
11932 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11933 "Range end address (%s)", lab);
11934 }
11935 }
11936 else
11937 dw2_asm_output_data (1, DW_RLE_end_of_list,
11938 "DW_RLE_end_of_list (%s)", lab);
11939 }
11940 ASM_OUTPUT_LABEL (asm_out_file, l2);
11941 }
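
/* A minimal sketch, not GCC's assembler-level routine, of the unsigned
   LEB128 encoding behind the dw2_asm_output_*_uleb128 calls above:
   seven payload bits per byte, with the high bit set on every byte
   except the last.  Returns the number of bytes stored into BUF.  */

static int
sketch_encode_uleb128 (unsigned long long value, unsigned char *buf)
{
  int len = 0;
  do
    {
      unsigned char byte = value & 0x7f;	/* low seven bits */
      value >>= 7;
      if (value != 0)
	byte |= 0x80;				/* more bytes follow */
      buf[len++] = byte;
    }
  while (value != 0);
  return len;
}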
11942
11943 /* Data structure containing information about input files. */
11944 struct file_info
11945 {
11946 const char *path; /* Complete file name. */
11947 const char *fname; /* File name part. */
11948 int length; /* Length of entire string. */
11949 struct dwarf_file_data * file_idx; /* Index in input file table. */
11950 int dir_idx; /* Index in directory table. */
11951 };
11952
11953 /* Data structure containing information about directories with source
11954 files. */
11955 struct dir_info
11956 {
11957 const char *path; /* Path including directory name. */
11958 int length; /* Path length. */
11959 int prefix; /* Index of directory entry which is a prefix. */
11960 int count; /* Number of files in this directory. */
11961 int dir_idx; /* Index of directory used as base. */
11962 };
11963
11964 /* Callback function for file_info comparison. We sort by looking at
11965 the directories in the path. */
11966
11967 static int
11968 file_info_cmp (const void *p1, const void *p2)
11969 {
11970 const struct file_info *const s1 = (const struct file_info *) p1;
11971 const struct file_info *const s2 = (const struct file_info *) p2;
11972 const unsigned char *cp1;
11973 const unsigned char *cp2;
11974
11975 /* Take care of file names without directories. We need to make sure that
11976 we return consistent values to qsort, since some implementations get
11977 confused if we return the same value when the two operands are passed in
11978 opposite orders. So if neither has a directory, return 0; otherwise return
11979 1 or -1 depending on which one has the directory. We want the one with
11980 the directory to sort after the one without, so all files without a
11981 directory are at the start (normally only the compilation unit file). */
11982 if ((s1->path == s1->fname || s2->path == s2->fname))
11983 return (s2->path == s2->fname) - (s1->path == s1->fname);
11984
11985 cp1 = (const unsigned char *) s1->path;
11986 cp2 = (const unsigned char *) s2->path;
11987
11988 while (1)
11989 {
11990 ++cp1;
11991 ++cp2;
11992 /* Reached the end of the first path? If so, handle like above,
11993 but now we want longer directory prefixes before shorter ones. */
11994 if ((cp1 == (const unsigned char *) s1->fname)
11995 || (cp2 == (const unsigned char *) s2->fname))
11996 return ((cp1 == (const unsigned char *) s1->fname)
11997 - (cp2 == (const unsigned char *) s2->fname));
11998
11999 /* If characters of the current path component differ, sort by them. */
12000 else if (*cp1 != *cp2)
12001 return *cp1 - *cp2;
12002 }
12003 }
12004
12005 struct file_name_acquire_data
12006 {
12007 struct file_info *files;
12008 int used_files;
12009 int max_files;
12010 };
12011
12012 /* Traversal function for the hash table. */
12013
12014 int
12015 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12016 {
12017 struct dwarf_file_data *d = *slot;
12018 struct file_info *fi;
12019 const char *f;
12020
12021 gcc_assert (fnad->max_files >= d->emitted_number);
12022
12023 if (! d->emitted_number)
12024 return 1;
12025
12026 gcc_assert (fnad->max_files != fnad->used_files);
12027
12028 fi = fnad->files + fnad->used_files++;
12029
12030 /* Skip all leading "./". */
12031 f = d->filename;
12032 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12033 f += 2;
12034
12035 /* Create a new array entry. */
12036 fi->path = f;
12037 fi->length = strlen (f);
12038 fi->file_idx = d;
12039
12040 /* Search for the file name part. */
12041 f = strrchr (f, DIR_SEPARATOR);
12042 #if defined (DIR_SEPARATOR_2)
12043 {
12044 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12045
12046 if (g != NULL)
12047 {
12048 if (f == NULL || f < g)
12049 f = g;
12050 }
12051 }
12052 #endif
12053
12054 fi->fname = f == NULL ? fi->path : f + 1;
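  /* For example, for "src/util/foo.c" fi->path is the whole string and
     fi->fname points at "foo.c"; for a bare "foo.c" the two pointers are
     equal, which is exactly what file_info_cmp tests above.  */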
12055 return 1;
12056 }
12057
12058 /* Helper function for output_file_names. Emit a FORM encoded
12059 string STR, with assembly comment start ENTRY_KIND and
12060 index IDX */
12061
12062 static void
12063 output_line_string (enum dwarf_form form, const char *str,
12064 const char *entry_kind, unsigned int idx)
12065 {
12066 switch (form)
12067 {
12068 case DW_FORM_string:
12069 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12070 break;
12071 case DW_FORM_line_strp:
12072 if (!debug_line_str_hash)
12073 debug_line_str_hash
12074 = hash_table<indirect_string_hasher>::create_ggc (10);
12075
12076 struct indirect_string_node *node;
12077 node = find_AT_string_in_table (str, debug_line_str_hash);
12078 set_indirect_string (node);
12079 node->form = form;
12080 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12081 debug_line_str_section, "%s: %#x: \"%s\"",
12082 entry_kind, 0, node->str);
12083 break;
12084 default:
12085 gcc_unreachable ();
12086 }
12087 }
12088
12089 /* Output the directory table and the file name table. We try to minimize
12090 the total amount of memory needed. A heuristic is used to avoid large
12091 slowdowns with many input files. */
12092
12093 static void
12094 output_file_names (void)
12095 {
12096 struct file_name_acquire_data fnad;
12097 int numfiles;
12098 struct file_info *files;
12099 struct dir_info *dirs;
12100 int *saved;
12101 int *savehere;
12102 int *backmap;
12103 int ndirs;
12104 int idx_offset;
12105 int i;
12106
12107 if (!last_emitted_file)
12108 {
12109 if (dwarf_version >= 5)
12110 {
12111 dw2_asm_output_data (1, 0, "Directory entry format count");
12112 dw2_asm_output_data_uleb128 (0, "Directories count");
12113 dw2_asm_output_data (1, 0, "File name entry format count");
12114 dw2_asm_output_data_uleb128 (0, "File names count");
12115 }
12116 else
12117 {
12118 dw2_asm_output_data (1, 0, "End directory table");
12119 dw2_asm_output_data (1, 0, "End file name table");
12120 }
12121 return;
12122 }
12123
12124 numfiles = last_emitted_file->emitted_number;
12125
12126 /* Allocate the various arrays we need. */
12127 files = XALLOCAVEC (struct file_info, numfiles);
12128 dirs = XALLOCAVEC (struct dir_info, numfiles);
12129
12130 fnad.files = files;
12131 fnad.used_files = 0;
12132 fnad.max_files = numfiles;
12133 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12134 gcc_assert (fnad.used_files == fnad.max_files);
12135
12136 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12137
12138 /* Find all the different directories used. */
12139 dirs[0].path = files[0].path;
12140 dirs[0].length = files[0].fname - files[0].path;
12141 dirs[0].prefix = -1;
12142 dirs[0].count = 1;
12143 dirs[0].dir_idx = 0;
12144 files[0].dir_idx = 0;
12145 ndirs = 1;
12146
12147 for (i = 1; i < numfiles; i++)
12148 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12149 && memcmp (dirs[ndirs - 1].path, files[i].path,
12150 dirs[ndirs - 1].length) == 0)
12151 {
12152 /* Same directory as last entry. */
12153 files[i].dir_idx = ndirs - 1;
12154 ++dirs[ndirs - 1].count;
12155 }
12156 else
12157 {
12158 int j;
12159
12160 /* This is a new directory. */
12161 dirs[ndirs].path = files[i].path;
12162 dirs[ndirs].length = files[i].fname - files[i].path;
12163 dirs[ndirs].count = 1;
12164 dirs[ndirs].dir_idx = ndirs;
12165 files[i].dir_idx = ndirs;
12166
12167 /* Search for a prefix. */
12168 dirs[ndirs].prefix = -1;
12169 for (j = 0; j < ndirs; j++)
12170 if (dirs[j].length < dirs[ndirs].length
12171 && dirs[j].length > 1
12172 && (dirs[ndirs].prefix == -1
12173 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12174 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12175 dirs[ndirs].prefix = j;
12176
12177 ++ndirs;
12178 }
12179
12180 /* Now to the actual work. We have to find a subset of the directories which
12181 allows expressing the file names using references to the directory table
12182 with the fewest characters. We do not do an exhaustive search
12183 where we would have to check every combination of every single
12184 possible prefix. Instead we use a heuristic which provides nearly optimal
12185 results in most cases and is never far off. */
12186 saved = XALLOCAVEC (int, ndirs);
12187 savehere = XALLOCAVEC (int, ndirs);
12188
12189 memset (saved, '\0', ndirs * sizeof (saved[0]));
12190 for (i = 0; i < ndirs; i++)
12191 {
12192 int j;
12193 int total;
12194
12195 /* We can always save some space for the current directory. But this
12196 does not mean it will be enough to justify adding the directory. */
12197 savehere[i] = dirs[i].length;
12198 total = (savehere[i] - saved[i]) * dirs[i].count;
12199
12200 for (j = i + 1; j < ndirs; j++)
12201 {
12202 savehere[j] = 0;
12203 if (saved[j] < dirs[i].length)
12204 {
12205 /* Determine whether the dirs[i] path is a prefix of the
12206 dirs[j] path. */
12207 int k;
12208
12209 k = dirs[j].prefix;
12210 while (k != -1 && k != (int) i)
12211 k = dirs[k].prefix;
12212
12213 if (k == (int) i)
12214 {
12215 /* Yes it is. We can possibly save some memory by
12216 writing the filenames in dirs[j] relative to
12217 dirs[i]. */
12218 savehere[j] = dirs[i].length;
12219 total += (savehere[j] - saved[j]) * dirs[j].count;
12220 }
12221 }
12222 }
12223
12224 /* Check whether we can save enough to justify adding the dirs[i]
12225 directory. */
12226 if (total > dirs[i].length + 1)
12227 {
12228 /* It's worthwhile adding. */
12229 for (j = i; j < ndirs; j++)
12230 if (savehere[j] > 0)
12231 {
12232 /* Remember how much we saved for this directory so far. */
12233 saved[j] = savehere[j];
12234
12235 /* Remember the prefix directory. */
12236 dirs[j].dir_idx = i;
12237 }
12238 }
12239 }
12240
12241 /* Emit the directory name table. */
12242 idx_offset = dirs[0].length > 0 ? 1 : 0;
12243 enum dwarf_form str_form = DW_FORM_string;
12244 enum dwarf_form idx_form = DW_FORM_udata;
12245 if (dwarf_version >= 5)
12246 {
12247 const char *comp_dir = comp_dir_string ();
12248 if (comp_dir == NULL)
12249 comp_dir = "";
12250 dw2_asm_output_data (1, 1, "Directory entry format count");
12251 if (DWARF5_USE_DEBUG_LINE_STR)
12252 str_form = DW_FORM_line_strp;
12253 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12254 dw2_asm_output_data_uleb128 (str_form, "%s",
12255 get_DW_FORM_name (str_form));
12256 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12257 if (str_form == DW_FORM_string)
12258 {
12259 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12260 for (i = 1 - idx_offset; i < ndirs; i++)
12261 dw2_asm_output_nstring (dirs[i].path,
12262 dirs[i].length
12263 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12264 "Directory Entry: %#x", i + idx_offset);
12265 }
12266 else
12267 {
12268 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12269 for (i = 1 - idx_offset; i < ndirs; i++)
12270 {
12271 const char *str
12272 = ggc_alloc_string (dirs[i].path,
12273 dirs[i].length
12274 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12275 output_line_string (str_form, str, "Directory Entry",
12276 (unsigned) i + idx_offset);
12277 }
12278 }
12279 }
12280 else
12281 {
12282 for (i = 1 - idx_offset; i < ndirs; i++)
12283 dw2_asm_output_nstring (dirs[i].path,
12284 dirs[i].length
12285 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12286 "Directory Entry: %#x", i + idx_offset);
12287
12288 dw2_asm_output_data (1, 0, "End directory table");
12289 }
12290
12291 /* We have to emit them in the order of emitted_number since that's
12292 used in the debug info generation. To do this efficiently we
12293 generate a back-mapping of the indices first. */
12294 backmap = XALLOCAVEC (int, numfiles);
12295 for (i = 0; i < numfiles; i++)
12296 backmap[files[i].file_idx->emitted_number - 1] = i;
12297
12298 if (dwarf_version >= 5)
12299 {
12300 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12301 if (filename0 == NULL)
12302 filename0 = "";
12303 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12304 DW_FORM_data2. Choose one based on the number of directories
12305 and how much space the indexes would occupy in each encoding.
12306 If we have at most 256 directories, all indexes fit into
12307 a single byte, so DW_FORM_data1 is most compact (with at most
12308 128 directories DW_FORM_udata would be just as compact, but
12309 no shorter and slower to decode). */
12310 if (ndirs + idx_offset <= 256)
12311 idx_form = DW_FORM_data1;
12312 /* If there are more than 65536 directories, we have to use
12313 DW_FORM_udata, because DW_FORM_data2 can't refer to them.
12314 Otherwise, compute how much space all the indexes would occupy if
12315 they used DW_FORM_udata - sum - and compare that to how large the
12316 DW_FORM_data2 encoding would be, and pick the more efficient one. */
12317 else if (ndirs + idx_offset <= 65536)
12318 {
12319 unsigned HOST_WIDE_INT sum = 1;
12320 for (i = 0; i < numfiles; i++)
12321 {
12322 int file_idx = backmap[i];
12323 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12324 sum += size_of_uleb128 (dir_idx);
12325 }
12326 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12327 idx_form = DW_FORM_data2;
12328 }
12329 #ifdef VMS_DEBUGGING_INFO
12330 dw2_asm_output_data (1, 4, "File name entry format count");
12331 #else
12332 dw2_asm_output_data (1, 2, "File name entry format count");
12333 #endif
12334 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12335 dw2_asm_output_data_uleb128 (str_form, "%s",
12336 get_DW_FORM_name (str_form));
12337 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12338 "DW_LNCT_directory_index");
12339 dw2_asm_output_data_uleb128 (idx_form, "%s",
12340 get_DW_FORM_name (idx_form));
12341 #ifdef VMS_DEBUGGING_INFO
12342 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12343 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12344 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12345 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12346 #endif
12347 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12348
12349 output_line_string (str_form, filename0, "File Entry", 0);
12350
12351 /* Include directory index. */
12352 if (idx_form != DW_FORM_udata)
12353 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12354 0, NULL);
12355 else
12356 dw2_asm_output_data_uleb128 (0, NULL);
12357
12358 #ifdef VMS_DEBUGGING_INFO
12359 dw2_asm_output_data_uleb128 (0, NULL);
12360 dw2_asm_output_data_uleb128 (0, NULL);
12361 #endif
12362 }
12363
12364 /* Now write all the file names. */
12365 for (i = 0; i < numfiles; i++)
12366 {
12367 int file_idx = backmap[i];
12368 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12369
12370 #ifdef VMS_DEBUGGING_INFO
12371 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12372
12373 /* Setting these fields can lead to debugger miscomparisons,
12374 but VMS Debug requires them to be set correctly. */
12375
12376 int ver;
12377 long long cdt;
12378 long siz;
12379 int maxfilelen = (strlen (files[file_idx].path)
12380 + dirs[dir_idx].length
12381 + MAX_VMS_VERSION_LEN + 1);
12382 char *filebuf = XALLOCAVEC (char, maxfilelen);
12383
12384 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12385 snprintf (filebuf, maxfilelen, "%s;%d",
12386 files[file_idx].path + dirs[dir_idx].length, ver);
12387
12388 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12389
12390 /* Include directory index. */
12391 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12392 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12393 dir_idx + idx_offset, NULL);
12394 else
12395 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12396
12397 /* Modification time. */
12398 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12399 &cdt, 0, 0, 0) == 0)
12400 ? cdt : 0, NULL);
12401
12402 /* File length in bytes. */
12403 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12404 0, &siz, 0, 0) == 0)
12405 ? siz : 0, NULL);
12406 #else
12407 output_line_string (str_form,
12408 files[file_idx].path + dirs[dir_idx].length,
12409 "File Entry", (unsigned) i + 1);
12410
12411 /* Include directory index. */
12412 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12413 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12414 dir_idx + idx_offset, NULL);
12415 else
12416 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12417
12418 if (dwarf_version >= 5)
12419 continue;
12420
12421 /* Modification time. */
12422 dw2_asm_output_data_uleb128 (0, NULL);
12423
12424 /* File length in bytes. */
12425 dw2_asm_output_data_uleb128 (0, NULL);
12426 #endif /* VMS_DEBUGGING_INFO */
12427 }
12428
12429 if (dwarf_version < 5)
12430 dw2_asm_output_data (1, 0, "End file name table");
12431 }
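
/* A simplified sketch, not the code above, of the DWARF 5 directory
   index form choice it makes: DW_FORM_data1 when every index fits in
   one byte, DW_FORM_udata when two bytes cannot reach all directories,
   and otherwise whichever of DW_FORM_data2 and DW_FORM_udata encodes
   all NUMFILES + 1 indexes in fewer bytes.  DIR_IDX[] stands for the
   per-file directory indexes; the idx_offset handling is omitted.  */

static enum dwarf_form
sketch_pick_dir_idx_form (const int *dir_idx, int numfiles, int ndirs)
{
  if (ndirs <= 256)
    return DW_FORM_data1;
  if (ndirs > 65536)
    return DW_FORM_udata;
  unsigned long long udata_size = 1;	/* file entry 0 always uses index 0 */
  for (int i = 0; i < numfiles; i++)
    {
      unsigned int v = dir_idx[i];
      do
	{
	  udata_size++;			/* one byte per 7 bits of index */
	  v >>= 7;
	}
      while (v != 0);
    }
  return (udata_size >= 2ULL * (numfiles + 1)
	  ? DW_FORM_data2 : DW_FORM_udata);
}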
12432
12433
12434 /* Output one line number table into the .debug_line section. */
12435
12436 static void
12437 output_one_line_info_table (dw_line_info_table *table)
12438 {
12439 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12440 unsigned int current_line = 1;
12441 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12442 dw_line_info_entry *ent, *prev_addr;
12443 size_t i;
12444 unsigned int view;
12445
12446 view = 0;
12447
12448 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12449 {
12450 switch (ent->opcode)
12451 {
12452 case LI_set_address:
12453 /* ??? Unfortunately, we have little choice here currently, and
12454 must always use the most general form. GCC does not know the
12455 address delta itself, so we can't use DW_LNS_advance_pc. Many
12456 ports do have length attributes which will give an upper bound
12457 on the address range. We could perhaps use length attributes
12458 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12459 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12460
12461 view = 0;
12462
12463 /* This can handle any delta. This takes
12464 4+DWARF2_ADDR_SIZE bytes. */
12465 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12466 debug_variable_location_views
12467 ? ", reset view to 0" : "");
12468 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12469 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12470 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12471
12472 prev_addr = ent;
12473 break;
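/* A byte-level illustration of the extended opcode emitted above: an
   extended opcode is a 0x00 byte, a ULEB128 length, the sub-opcode and
   its operands.  For a 4-byte address the sequence is therefore
   0x00 0x05 0x02 <4 address bytes>, where 0x05 is 1 + DWARF2_ADDR_SIZE
   and 0x02 is DW_LNE_set_address.  */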
12474
12475 case LI_adv_address:
12476 {
12477 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12478 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12479 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12480
12481 view++;
12482
12483 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12484 dw2_asm_output_delta (2, line_label, prev_label,
12485 "from %s to %s", prev_label, line_label);
12486
12487 prev_addr = ent;
12488 break;
12489 }
12490
12491 case LI_set_line:
12492 if (ent->val == current_line)
12493 {
12494 /* We still need to start a new row, so output a copy insn. */
12495 dw2_asm_output_data (1, DW_LNS_copy,
12496 "copy line %u", current_line);
12497 }
12498 else
12499 {
12500 int line_offset = ent->val - current_line;
12501 int line_delta = line_offset - DWARF_LINE_BASE;
12502
12503 current_line = ent->val;
12504 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12505 {
12506 /* This can handle deltas from -10 to 234, using the current
12507 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12508 This takes 1 byte. */
12509 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12510 "line %u", current_line);
12511 }
12512 else
12513 {
12514 /* This can handle any delta. This takes at least 4 bytes,
12515 depending on the value being encoded. */
12516 dw2_asm_output_data (1, DW_LNS_advance_line,
12517 "advance to line %u", current_line);
12518 dw2_asm_output_data_sleb128 (line_offset, NULL);
12519 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12520 }
12521 }
12522 break;
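/* Worked example of the special opcode arithmetic above: the address is
   advanced separately (LI_set_address / LI_adv_address), so a special
   opcode here only encodes the line advance, i.e.
   opcode = DWARF_LINE_OPCODE_BASE + (line_offset - DWARF_LINE_BASE).
   With DWARF_LINE_BASE == -10 as noted above, stepping to the next line
   (line_offset == 1) emits the single byte DWARF_LINE_OPCODE_BASE + 11.  */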
12523
12524 case LI_set_file:
12525 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12526 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12527 break;
12528
12529 case LI_set_column:
12530 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12531 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12532 break;
12533
12534 case LI_negate_stmt:
12535 current_is_stmt = !current_is_stmt;
12536 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12537 "is_stmt %d", current_is_stmt);
12538 break;
12539
12540 case LI_set_prologue_end:
12541 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12542 "set prologue end");
12543 break;
12544
12545 case LI_set_epilogue_begin:
12546 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12547 "set epilogue begin");
12548 break;
12549
12550 case LI_set_discriminator:
12551 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12552 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12553 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12554 dw2_asm_output_data_uleb128 (ent->val, NULL);
12555 break;
12556 }
12557 }
12558
12559 /* Emit debug info for the address of the end of the table. */
12560 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12561 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12562 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12563 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12564
12565 dw2_asm_output_data (1, 0, "end sequence");
12566 dw2_asm_output_data_uleb128 (1, NULL);
12567 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12568 }
12569
12570 /* Output the source line number correspondence information. This
12571 information goes into the .debug_line section. */
12572
12573 static void
12574 output_line_info (bool prologue_only)
12575 {
12576 static unsigned int generation;
12577 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12578 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12579 bool saw_one = false;
12580 int opc;
12581
12582 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12583 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12584 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12585 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12586
12587 if (!XCOFF_DEBUGGING_INFO)
12588 {
12589 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12590 dw2_asm_output_data (4, 0xffffffff,
12591 "Initial length escape value indicating 64-bit DWARF extension");
12592 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12593 "Length of Source Line Info");
12594 }
12595
12596 ASM_OUTPUT_LABEL (asm_out_file, l1);
12597
12598 output_dwarf_version ();
12599 if (dwarf_version >= 5)
12600 {
12601 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12602 dw2_asm_output_data (1, 0, "Segment Size");
12603 }
12604 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12605 ASM_OUTPUT_LABEL (asm_out_file, p1);
12606
12607 /* Define the architecture-dependent minimum instruction length (in bytes).
12608 In this implementation of DWARF, this field is used for information
12609 purposes only. Since GCC generates assembly language, we have no
12610 a priori knowledge of how many instruction bytes are generated for each
12611 source line, and therefore can use only the DW_LNE_set_address and
12612 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12613 this as '1', which is "correct enough" for all architectures,
12614 and don't let the target override. */
12615 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12616
12617 if (dwarf_version >= 4)
12618 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12619 "Maximum Operations Per Instruction");
12620 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12621 "Default is_stmt_start flag");
12622 dw2_asm_output_data (1, DWARF_LINE_BASE,
12623 "Line Base Value (Special Opcodes)");
12624 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12625 "Line Range Value (Special Opcodes)");
12626 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12627 "Special Opcode Base");
12628
12629 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12630 {
12631 int n_op_args;
12632 switch (opc)
12633 {
12634 case DW_LNS_advance_pc:
12635 case DW_LNS_advance_line:
12636 case DW_LNS_set_file:
12637 case DW_LNS_set_column:
12638 case DW_LNS_fixed_advance_pc:
12639 case DW_LNS_set_isa:
12640 n_op_args = 1;
12641 break;
12642 default:
12643 n_op_args = 0;
12644 break;
12645 }
12646
12647 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12648 opc, n_op_args);
12649 }
12650
12651 /* Write out the information about the files we use. */
12652 output_file_names ();
12653 ASM_OUTPUT_LABEL (asm_out_file, p2);
12654 if (prologue_only)
12655 {
12656 /* Output the marker for the end of the line number info. */
12657 ASM_OUTPUT_LABEL (asm_out_file, l2);
12658 return;
12659 }
12660
12661 if (separate_line_info)
12662 {
12663 dw_line_info_table *table;
12664 size_t i;
12665
12666 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12667 if (table->in_use)
12668 {
12669 output_one_line_info_table (table);
12670 saw_one = true;
12671 }
12672 }
12673 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12674 {
12675 output_one_line_info_table (cold_text_section_line_info);
12676 saw_one = true;
12677 }
12678
12679 /* ??? Some Darwin linkers crash on a .debug_line section with no
12680 sequences. Further, merely a DW_LNE_end_sequence entry is not
12681 sufficient -- the address column must also be initialized.
12682 Make sure to output at least one set_address/end_sequence pair,
12683 choosing .text since that section is always present. */
12684 if (text_section_line_info->in_use || !saw_one)
12685 output_one_line_info_table (text_section_line_info);
12686
12687 /* Output the marker for the end of the line number info. */
12688 ASM_OUTPUT_LABEL (asm_out_file, l2);
12689 }
12690 \f
12691 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12692
12693 static inline bool
12694 need_endianity_attribute_p (bool reverse)
12695 {
12696 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12697 }
12698
12699 /* Given a pointer to a tree node for some base type, return a pointer to
12700 a DIE that describes the given type. REVERSE is true if the type is
12701 to be interpreted in the reverse storage order wrt the target order.
12702
12703 This routine must only be called for GCC type nodes that correspond to
12704 Dwarf base (fundamental) types. */
12705
12706 static dw_die_ref
12707 base_type_die (tree type, bool reverse)
12708 {
12709 dw_die_ref base_type_result;
12710 enum dwarf_type encoding;
12711 bool fpt_used = false;
12712 struct fixed_point_type_info fpt_info;
12713 tree type_bias = NULL_TREE;
12714
12715 /* If this is a subtype that should not be emitted as a subrange type,
12716 use the base type. See subrange_type_for_debug_p. */
12717 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12718 type = TREE_TYPE (type);
12719
12720 switch (TREE_CODE (type))
12721 {
12722 case INTEGER_TYPE:
12723 if ((dwarf_version >= 4 || !dwarf_strict)
12724 && TYPE_NAME (type)
12725 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12726 && DECL_IS_BUILTIN (TYPE_NAME (type))
12727 && DECL_NAME (TYPE_NAME (type)))
12728 {
12729 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12730 if (strcmp (name, "char16_t") == 0
12731 || strcmp (name, "char32_t") == 0)
12732 {
12733 encoding = DW_ATE_UTF;
12734 break;
12735 }
12736 }
12737 if ((dwarf_version >= 3 || !dwarf_strict)
12738 && lang_hooks.types.get_fixed_point_type_info)
12739 {
12740 memset (&fpt_info, 0, sizeof (fpt_info));
12741 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12742 {
12743 fpt_used = true;
12744 encoding = ((TYPE_UNSIGNED (type))
12745 ? DW_ATE_unsigned_fixed
12746 : DW_ATE_signed_fixed);
12747 break;
12748 }
12749 }
12750 if (TYPE_STRING_FLAG (type))
12751 {
12752 if (TYPE_UNSIGNED (type))
12753 encoding = DW_ATE_unsigned_char;
12754 else
12755 encoding = DW_ATE_signed_char;
12756 }
12757 else if (TYPE_UNSIGNED (type))
12758 encoding = DW_ATE_unsigned;
12759 else
12760 encoding = DW_ATE_signed;
12761
12762 if (!dwarf_strict
12763 && lang_hooks.types.get_type_bias)
12764 type_bias = lang_hooks.types.get_type_bias (type);
12765 break;
12766
12767 case REAL_TYPE:
12768 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12769 {
12770 if (dwarf_version >= 3 || !dwarf_strict)
12771 encoding = DW_ATE_decimal_float;
12772 else
12773 encoding = DW_ATE_lo_user;
12774 }
12775 else
12776 encoding = DW_ATE_float;
12777 break;
12778
12779 case FIXED_POINT_TYPE:
12780 if (!(dwarf_version >= 3 || !dwarf_strict))
12781 encoding = DW_ATE_lo_user;
12782 else if (TYPE_UNSIGNED (type))
12783 encoding = DW_ATE_unsigned_fixed;
12784 else
12785 encoding = DW_ATE_signed_fixed;
12786 break;
12787
12788 /* Dwarf2 doesn't know anything about complex ints, so use
12789 a user-defined type for them. */
12790 case COMPLEX_TYPE:
12791 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12792 encoding = DW_ATE_complex_float;
12793 else
12794 encoding = DW_ATE_lo_user;
12795 break;
12796
12797 case BOOLEAN_TYPE:
12798 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12799 encoding = DW_ATE_boolean;
12800 break;
12801
12802 default:
12803 /* No other TREE_CODEs are Dwarf fundamental types. */
12804 gcc_unreachable ();
12805 }
12806
12807 base_type_result = new_die_raw (DW_TAG_base_type);
12808
12809 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12810 int_size_in_bytes (type));
12811 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12812
12813 if (need_endianity_attribute_p (reverse))
12814 add_AT_unsigned (base_type_result, DW_AT_endianity,
12815 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12816
12817 add_alignment_attribute (base_type_result, type);
12818
12819 if (fpt_used)
12820 {
12821 switch (fpt_info.scale_factor_kind)
12822 {
12823 case fixed_point_scale_factor_binary:
12824 add_AT_int (base_type_result, DW_AT_binary_scale,
12825 fpt_info.scale_factor.binary);
12826 break;
12827
12828 case fixed_point_scale_factor_decimal:
12829 add_AT_int (base_type_result, DW_AT_decimal_scale,
12830 fpt_info.scale_factor.decimal);
12831 break;
12832
12833 case fixed_point_scale_factor_arbitrary:
12834 /* Arbitrary scale factors cannot be described in standard DWARF,
12835 yet. */
12836 if (!dwarf_strict)
12837 {
12838 /* Describe the scale factor as a rational constant. */
12839 const dw_die_ref scale_factor
12840 = new_die (DW_TAG_constant, comp_unit_die (), type);
12841
12842 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12843 fpt_info.scale_factor.arbitrary.numerator);
12844 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12845 fpt_info.scale_factor.arbitrary.denominator);
12846
12847 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12848 }
12849 break;
12850
12851 default:
12852 gcc_unreachable ();
12853 }
12854 }
12855
12856 if (type_bias)
12857 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12858 dw_scalar_form_constant
12859 | dw_scalar_form_exprloc
12860 | dw_scalar_form_reference,
12861 NULL);
12862
12863 return base_type_result;
12864 }
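/* For illustration, the DIE built above for a plain 32-bit "unsigned int"
   carries DW_AT_byte_size 4 and DW_AT_encoding DW_ATE_unsigned; the
   scale-factor and bias attributes are only added when the language hooks
   queried above report fixed-point information or a type bias.  */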
12865
12866 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12867 named 'auto' in its type: return true for it, false otherwise. */
12868
12869 static inline bool
12870 is_cxx_auto (tree type)
12871 {
12872 if (is_cxx ())
12873 {
12874 tree name = TYPE_IDENTIFIER (type);
12875 if (name == get_identifier ("auto")
12876 || name == get_identifier ("decltype(auto)"))
12877 return true;
12878 }
12879 return false;
12880 }
12881
12882 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12883 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12884
12885 static inline int
12886 is_base_type (tree type)
12887 {
12888 switch (TREE_CODE (type))
12889 {
12890 case INTEGER_TYPE:
12891 case REAL_TYPE:
12892 case FIXED_POINT_TYPE:
12893 case COMPLEX_TYPE:
12894 case BOOLEAN_TYPE:
12895 case POINTER_BOUNDS_TYPE:
12896 return 1;
12897
12898 case VOID_TYPE:
12899 case ARRAY_TYPE:
12900 case RECORD_TYPE:
12901 case UNION_TYPE:
12902 case QUAL_UNION_TYPE:
12903 case ENUMERAL_TYPE:
12904 case FUNCTION_TYPE:
12905 case METHOD_TYPE:
12906 case POINTER_TYPE:
12907 case REFERENCE_TYPE:
12908 case NULLPTR_TYPE:
12909 case OFFSET_TYPE:
12910 case LANG_TYPE:
12911 case VECTOR_TYPE:
12912 return 0;
12913
12914 default:
12915 if (is_cxx_auto (type))
12916 return 0;
12917 gcc_unreachable ();
12918 }
12919
12920 return 0;
12921 }
12922
12923 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12924 node, return the size in bits for the type if it is a constant, or else
12925 return the alignment for the type if the type's size is not constant, or
12926 else return BITS_PER_WORD if the type actually turns out to be an
12927 ERROR_MARK node. */
12928
12929 static inline unsigned HOST_WIDE_INT
12930 simple_type_size_in_bits (const_tree type)
12931 {
12932 if (TREE_CODE (type) == ERROR_MARK)
12933 return BITS_PER_WORD;
12934 else if (TYPE_SIZE (type) == NULL_TREE)
12935 return 0;
12936 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12937 return tree_to_uhwi (TYPE_SIZE (type));
12938 else
12939 return TYPE_ALIGN (type);
12940 }
12941
12942 /* Similarly, but return an offset_int instead of UHWI. */
12943
12944 static inline offset_int
12945 offset_int_type_size_in_bits (const_tree type)
12946 {
12947 if (TREE_CODE (type) == ERROR_MARK)
12948 return BITS_PER_WORD;
12949 else if (TYPE_SIZE (type) == NULL_TREE)
12950 return 0;
12951 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12952 return wi::to_offset (TYPE_SIZE (type));
12953 else
12954 return TYPE_ALIGN (type);
12955 }
12956
12957 /* Given a pointer to a tree node for a subrange type, return a pointer
12958 to a DIE that describes the given type. */
12959
12960 static dw_die_ref
12961 subrange_type_die (tree type, tree low, tree high, tree bias,
12962 dw_die_ref context_die)
12963 {
12964 dw_die_ref subrange_die;
12965 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12966
12967 if (context_die == NULL)
12968 context_die = comp_unit_die ();
12969
12970 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12971
12972 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12973 {
12974 /* The size of the subrange type and its base type do not match,
12975 so we need to generate a size attribute for the subrange type. */
12976 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12977 }
12978
12979 add_alignment_attribute (subrange_die, type);
12980
12981 if (low)
12982 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12983 if (high)
12984 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12985 if (bias && !dwarf_strict)
12986 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12987 dw_scalar_form_constant
12988 | dw_scalar_form_exprloc
12989 | dw_scalar_form_reference,
12990 NULL);
12991
12992 return subrange_die;
12993 }
12994
12995 /* Returns the (const and/or volatile) cv_qualifiers associated with
12996 the decl node. This will normally be augmented with the
12997 cv_qualifiers of the underlying type in add_type_attribute. */
12998
12999 static int
13000 decl_quals (const_tree decl)
13001 {
13002 return ((TREE_READONLY (decl)
13003 /* The C++ front-end correctly marks reference-typed
13004 variables as readonly, but from a language (and debug
13005 info) standpoint they are not const-qualified. */
13006 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13007 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13008 | (TREE_THIS_VOLATILE (decl)
13009 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13010 }
13011
13012 /* Determine the TYPE whose qualifiers match the largest strict subset
13013 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13014 qualifiers outside QUAL_MASK. */
13015
13016 static int
13017 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13018 {
13019 tree t;
13020 int best_rank = 0, best_qual = 0, max_rank;
13021
13022 type_quals &= qual_mask;
13023 max_rank = popcount_hwi (type_quals) - 1;
13024
13025 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13026 t = TYPE_NEXT_VARIANT (t))
13027 {
13028 int q = TYPE_QUALS (t) & qual_mask;
13029
13030 if ((q & type_quals) == q && q != type_quals
13031 && check_base_type (t, type))
13032 {
13033 int rank = popcount_hwi (q);
13034
13035 if (rank > best_rank)
13036 {
13037 best_rank = rank;
13038 best_qual = q;
13039 }
13040 }
13041 }
13042
13043 return best_qual;
13044 }
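/* A small example of the search above: for a "const volatile int",
   type_quals is TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE and max_rank is 1,
   so the loop looks for an existing variant carrying exactly one of the
   two qualifiers (say "const int") and returns its qualifier set; if no
   such variant exists, 0 is returned and the caller starts over from the
   unqualified type.  */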
13045
13046 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13047 static const dwarf_qual_info_t dwarf_qual_info[] =
13048 {
13049 { TYPE_QUAL_CONST, DW_TAG_const_type },
13050 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13051 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13052 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13053 };
13054 static const unsigned int dwarf_qual_info_size
13055 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13056
13057 /* If DIE is a qualified DIE of some base DIE with the same parent,
13058 return the base DIE, otherwise return NULL. Set MASK to the
13059 qualifiers added compared to the returned DIE. */
13060
13061 static dw_die_ref
13062 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13063 {
13064 unsigned int i;
13065 for (i = 0; i < dwarf_qual_info_size; i++)
13066 if (die->die_tag == dwarf_qual_info[i].t)
13067 break;
13068 if (i == dwarf_qual_info_size)
13069 return NULL;
13070 if (vec_safe_length (die->die_attr) != 1)
13071 return NULL;
13072 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13073 if (type == NULL || type->die_parent != die->die_parent)
13074 return NULL;
13075 *mask |= dwarf_qual_info[i].q;
13076 if (depth)
13077 {
13078 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13079 if (ret)
13080 return ret;
13081 }
13082 return type;
13083 }
13084
13085 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13086 entry that chains the modifiers specified by CV_QUALS in front of the
13087 given type. REVERSE is true if the type is to be interpreted in the
13088 reverse storage order wrt the target order. */
13089
13090 static dw_die_ref
13091 modified_type_die (tree type, int cv_quals, bool reverse,
13092 dw_die_ref context_die)
13093 {
13094 enum tree_code code = TREE_CODE (type);
13095 dw_die_ref mod_type_die;
13096 dw_die_ref sub_die = NULL;
13097 tree item_type = NULL;
13098 tree qualified_type;
13099 tree name, low, high;
13100 dw_die_ref mod_scope;
13101 /* Only these cv-qualifiers are currently handled. */
13102 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13103 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13104 ENCODE_QUAL_ADDR_SPACE(~0U));
13105 const bool reverse_base_type
13106 = need_endianity_attribute_p (reverse) && is_base_type (type);
13107
13108 if (code == ERROR_MARK)
13109 return NULL;
13110
13111 if (lang_hooks.types.get_debug_type)
13112 {
13113 tree debug_type = lang_hooks.types.get_debug_type (type);
13114
13115 if (debug_type != NULL_TREE && debug_type != type)
13116 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13117 }
13118
13119 cv_quals &= cv_qual_mask;
13120
13121 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13122 tag modifier (and not an attribute) that old consumers won't be
13123 able to handle. */
13124 if (dwarf_version < 3)
13125 cv_quals &= ~TYPE_QUAL_RESTRICT;
13126
13127 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13128 if (dwarf_version < 5)
13129 cv_quals &= ~TYPE_QUAL_ATOMIC;
13130
13131 /* See if we already have the appropriately qualified variant of
13132 this type. */
13133 qualified_type = get_qualified_type (type, cv_quals);
13134
13135 if (qualified_type == sizetype)
13136 {
13137 /* Try not to expose the internal sizetype type's name. */
13138 if (TYPE_NAME (qualified_type)
13139 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13140 {
13141 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13142
13143 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13144 && (TYPE_PRECISION (t)
13145 == TYPE_PRECISION (qualified_type))
13146 && (TYPE_UNSIGNED (t)
13147 == TYPE_UNSIGNED (qualified_type)));
13148 qualified_type = t;
13149 }
13150 else if (qualified_type == sizetype
13151 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13152 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13153 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13154 qualified_type = size_type_node;
13155 }
13156
13157 /* If we do, then we can just use its DIE, if it exists. */
13158 if (qualified_type)
13159 {
13160 mod_type_die = lookup_type_die (qualified_type);
13161
13162 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13163 dealt with specially: the DIE with the attribute, if it exists, is
13164 placed immediately after the regular DIE for the same base type. */
13165 if (mod_type_die
13166 && (!reverse_base_type
13167 || ((mod_type_die = mod_type_die->die_sib) != NULL
13168 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13169 return mod_type_die;
13170 }
13171
13172 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13173
13174 /* Handle C typedef types. */
13175 if (name
13176 && TREE_CODE (name) == TYPE_DECL
13177 && DECL_ORIGINAL_TYPE (name)
13178 && !DECL_ARTIFICIAL (name))
13179 {
13180 tree dtype = TREE_TYPE (name);
13181
13182 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13183 if (qualified_type == dtype && !reverse_base_type)
13184 {
13185 tree origin = decl_ultimate_origin (name);
13186
13187 /* Typedef variants that have an abstract origin don't get their own
13188 type DIE (see gen_typedef_die), so fall back on the ultimate
13189 abstract origin instead. */
13190 if (origin != NULL && origin != name)
13191 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13192 context_die);
13193
13194 /* For a named type, use the typedef. */
13195 gen_type_die (qualified_type, context_die);
13196 return lookup_type_die (qualified_type);
13197 }
13198 else
13199 {
13200 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13201 dquals &= cv_qual_mask;
13202 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13203 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13204 /* cv-unqualified version of named type. Just use
13205 the unnamed type to which it refers. */
13206 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13207 reverse, context_die);
13208 /* Else cv-qualified version of named type; fall through. */
13209 }
13210 }
13211
13212 mod_scope = scope_die_for (type, context_die);
13213
13214 if (cv_quals)
13215 {
13216 int sub_quals = 0, first_quals = 0;
13217 unsigned i;
13218 dw_die_ref first = NULL, last = NULL;
13219
13220 /* Determine a lesser qualified type that most closely matches
13221 this one. Then generate DW_TAG_* entries for the remaining
13222 qualifiers. */
13223 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13224 cv_qual_mask);
13225 if (sub_quals && use_debug_types)
13226 {
13227 bool needed = false;
13228 /* If emitting type units, make sure the order of qualifiers
13229 is canonical. Thus, start from unqualified type if
13230 an earlier qualifier is missing in sub_quals, but some later
13231 one is present there. */
13232 for (i = 0; i < dwarf_qual_info_size; i++)
13233 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13234 needed = true;
13235 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13236 {
13237 sub_quals = 0;
13238 break;
13239 }
13240 }
13241 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13242 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13243 {
13244 /* As not all intermediate qualified DIEs have corresponding
13245 tree types, ensure that qualified DIEs in the same scope
13246 as their DW_AT_type are emitted after their DW_AT_type,
13247 only with other qualified DIEs for the same type possibly
13248 in between them. Determine the range of such qualified
13249 DIEs now (first being the base type, last being corresponding
13250 last qualified DIE for it). */
13251 unsigned int count = 0;
13252 first = qualified_die_p (mod_type_die, &first_quals,
13253 dwarf_qual_info_size);
13254 if (first == NULL)
13255 first = mod_type_die;
13256 gcc_assert ((first_quals & ~sub_quals) == 0);
13257 for (count = 0, last = first;
13258 count < (1U << dwarf_qual_info_size);
13259 count++, last = last->die_sib)
13260 {
13261 int quals = 0;
13262 if (last == mod_scope->die_child)
13263 break;
13264 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13265 != first)
13266 break;
13267 }
13268 }
13269
13270 for (i = 0; i < dwarf_qual_info_size; i++)
13271 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13272 {
13273 dw_die_ref d;
13274 if (first && first != last)
13275 {
13276 for (d = first->die_sib; ; d = d->die_sib)
13277 {
13278 int quals = 0;
13279 qualified_die_p (d, &quals, dwarf_qual_info_size);
13280 if (quals == (first_quals | dwarf_qual_info[i].q))
13281 break;
13282 if (d == last)
13283 {
13284 d = NULL;
13285 break;
13286 }
13287 }
13288 if (d)
13289 {
13290 mod_type_die = d;
13291 continue;
13292 }
13293 }
13294 if (first)
13295 {
13296 d = new_die_raw (dwarf_qual_info[i].t);
13297 add_child_die_after (mod_scope, d, last);
13298 last = d;
13299 }
13300 else
13301 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13302 if (mod_type_die)
13303 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13304 mod_type_die = d;
13305 first_quals |= dwarf_qual_info[i].q;
13306 }
13307 }
13308 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13309 {
13310 dwarf_tag tag = DW_TAG_pointer_type;
13311 if (code == REFERENCE_TYPE)
13312 {
13313 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13314 tag = DW_TAG_rvalue_reference_type;
13315 else
13316 tag = DW_TAG_reference_type;
13317 }
13318 mod_type_die = new_die (tag, mod_scope, type);
13319
13320 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13321 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13322 add_alignment_attribute (mod_type_die, type);
13323 item_type = TREE_TYPE (type);
13324
13325 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13326 if (!ADDR_SPACE_GENERIC_P (as))
13327 {
13328 int action = targetm.addr_space.debug (as);
13329 if (action >= 0)
13330 {
13331 /* Positive values indicate an address_class. */
13332 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13333 }
13334 else
13335 {
13336 /* Negative values indicate an (inverted) segment base reg. */
13337 dw_loc_descr_ref d
13338 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13339 add_AT_loc (mod_type_die, DW_AT_segment, d);
13340 }
13341 }
13342 }
13343 else if (code == INTEGER_TYPE
13344 && TREE_TYPE (type) != NULL_TREE
13345 && subrange_type_for_debug_p (type, &low, &high))
13346 {
13347 tree bias = NULL_TREE;
13348 if (lang_hooks.types.get_type_bias)
13349 bias = lang_hooks.types.get_type_bias (type);
13350 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13351 item_type = TREE_TYPE (type);
13352 }
13353 else if (is_base_type (type))
13354 {
13355 mod_type_die = base_type_die (type, reverse);
13356
13357 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13358 if (reverse_base_type)
13359 {
13360 dw_die_ref after_die
13361 = modified_type_die (type, cv_quals, false, context_die);
13362 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13363 }
13364 else
13365 add_child_die (comp_unit_die (), mod_type_die);
13366
13367 add_pubtype (type, mod_type_die);
13368 }
13369 else
13370 {
13371 gen_type_die (type, context_die);
13372
13373 /* We have to get the type_main_variant here (and pass that to the
13374 `lookup_type_die' routine) because the ..._TYPE node we have
13375 might simply be a *copy* of some original type node (where the
13376 copy was created to help us keep track of typedef names) and
13377 that copy might have a different TYPE_UID from the original
13378 ..._TYPE node. */
13379 if (TREE_CODE (type) == FUNCTION_TYPE
13380 || TREE_CODE (type) == METHOD_TYPE)
13381 {
13382 /* For function/method types, can't just use type_main_variant here,
13383 because that can have different ref-qualifiers for C++,
13384 but try to canonicalize. */
13385 tree main = TYPE_MAIN_VARIANT (type);
13386 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13387 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13388 && check_base_type (t, main)
13389 && check_lang_type (t, type))
13390 return lookup_type_die (t);
13391 return lookup_type_die (type);
13392 }
13393 else if (TREE_CODE (type) != VECTOR_TYPE
13394 && TREE_CODE (type) != ARRAY_TYPE)
13395 return lookup_type_die (type_main_variant (type));
13396 else
13397 /* Vectors have the debugging information in the type,
13398 not the main variant. */
13399 return lookup_type_die (type);
13400 }
13401
13402 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13403 don't output a DW_TAG_typedef, since there isn't one in the
13404 user's program; just attach a DW_AT_name to the type.
13405 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13406 if the base type already has the same name. */
13407 if (name
13408 && ((TREE_CODE (name) != TYPE_DECL
13409 && (qualified_type == TYPE_MAIN_VARIANT (type)
13410 || (cv_quals == TYPE_UNQUALIFIED)))
13411 || (TREE_CODE (name) == TYPE_DECL
13412 && TREE_TYPE (name) == qualified_type
13413 && DECL_NAME (name))))
13414 {
13415 if (TREE_CODE (name) == TYPE_DECL)
13416 /* Could just call add_name_and_src_coords_attributes here,
13417 but since this is a builtin type it doesn't have any
13418 useful source coordinates anyway. */
13419 name = DECL_NAME (name);
13420 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13421 }
13422 /* This probably indicates a bug. */
13423 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13424 {
13425 name = TYPE_IDENTIFIER (type);
13426 add_name_attribute (mod_type_die,
13427 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13428 }
13429
13430 if (qualified_type && !reverse_base_type)
13431 equate_type_number_to_die (qualified_type, mod_type_die);
13432
13433 if (item_type)
13434 /* We must do this after the equate_type_number_to_die call, in case
13435 this is a recursive type. This ensures that the modified_type_die
13436 recursion will terminate even if the type is recursive. Recursive
13437 types are possible in Ada. */
13438 sub_die = modified_type_die (item_type,
13439 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13440 reverse,
13441 context_die);
13442
13443 if (sub_die != NULL)
13444 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13445
13446 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13447 if (TYPE_ARTIFICIAL (type))
13448 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13449
13450 return mod_type_die;
13451 }
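/* To make the qualifier handling above concrete: for a "const volatile
   int", assuming a "const int" variant already exists, sub_quals comes
   back as TYPE_QUAL_CONST, the recursive call builds (or reuses)
   DW_TAG_const_type -> int, and the loop then wraps it in a
   DW_TAG_volatile_type whose DW_AT_type refers to the const DIE.  The
   use_debug_types check restarts from the unqualified type whenever
   reusing sub_quals would emit the qualifier DIEs in a non-canonical
   order.  */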
13452
13453 /* Generate DIEs for the generic parameters of T.
13454 T must be either a generic type or a generic function.
13455 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13456
13457 static void
13458 gen_generic_params_dies (tree t)
13459 {
13460 tree parms, args;
13461 int parms_num, i;
13462 dw_die_ref die = NULL;
13463 int non_default;
13464
13465 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13466 return;
13467
13468 if (TYPE_P (t))
13469 die = lookup_type_die (t);
13470 else if (DECL_P (t))
13471 die = lookup_decl_die (t);
13472
13473 gcc_assert (die);
13474
13475 parms = lang_hooks.get_innermost_generic_parms (t);
13476 if (!parms)
13477 /* T has no generic parameters. It means T is neither a generic type
13478 nor a generic function. End of story. */
13479 return;
13480
13481 parms_num = TREE_VEC_LENGTH (parms);
13482 args = lang_hooks.get_innermost_generic_args (t);
13483 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13484 non_default = int_cst_value (TREE_CHAIN (args));
13485 else
13486 non_default = TREE_VEC_LENGTH (args);
13487 for (i = 0; i < parms_num; i++)
13488 {
13489 tree parm, arg, arg_pack_elems;
13490 dw_die_ref parm_die;
13491
13492 parm = TREE_VEC_ELT (parms, i);
13493 arg = TREE_VEC_ELT (args, i);
13494 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13495 gcc_assert (parm && TREE_VALUE (parm) && arg);
13496
13497 if (parm && TREE_VALUE (parm) && arg)
13498 {
13499 /* If PARM represents a template parameter pack,
13500 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13501 by DW_TAG_template_*_parameter DIEs for the argument
13502 pack elements of ARG. Note that ARG would then be
13503 an argument pack. */
13504 if (arg_pack_elems)
13505 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13506 arg_pack_elems,
13507 die);
13508 else
13509 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13510 true /* emit name */, die);
13511 if (i >= non_default)
13512 add_AT_flag (parm_die, DW_AT_default_value, 1);
13513 }
13514 }
13515 }
13516
13517 /* Create and return a DIE for PARM which should be
13518 the representation of a generic type parameter.
13519 For instance, in the C++ front end, PARM would be a template parameter.
13520 ARG is the argument to PARM.
13521 EMIT_NAME_P, if true, means the DIE will have a DW_AT_name attribute set to
13522 the name of PARM.
13523 PARENT_DIE is the parent DIE to which the newly created DIE should be added,
13524 as a child node. */
13525
13526 static dw_die_ref
13527 generic_parameter_die (tree parm, tree arg,
13528 bool emit_name_p,
13529 dw_die_ref parent_die)
13530 {
13531 dw_die_ref tmpl_die = NULL;
13532 const char *name = NULL;
13533
13534 if (!parm || !DECL_NAME (parm) || !arg)
13535 return NULL;
13536
13537 /* We support non-type generic parameters and arguments,
13538 type generic parameters and arguments, as well as
13539 generic generic parameters (a.k.a. template template parameters in C++)
13540 and arguments. */
13541 if (TREE_CODE (parm) == PARM_DECL)
13542 /* PARM is a nontype generic parameter */
13543 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13544 else if (TREE_CODE (parm) == TYPE_DECL)
13545 /* PARM is a type generic parameter. */
13546 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13547 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13548 /* PARM is a generic generic parameter.
13549 Its DIE is a GNU extension. It shall have a
13550 DW_AT_name attribute to represent the name of the template template
13551 parameter, and a DW_AT_GNU_template_name attribute to represent the
13552 name of the template template argument. */
13553 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13554 parent_die, parm);
13555 else
13556 gcc_unreachable ();
13557
13558 if (tmpl_die)
13559 {
13560 tree tmpl_type;
13561
13562 /* If PARM is a generic parameter pack, it means we are
13563 emitting debug info for a template argument pack element.
13564 In other terms, ARG is a template argument pack element.
13565 In that case, we don't emit any DW_AT_name attribute for
13566 the die. */
13567 if (emit_name_p)
13568 {
13569 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13570 gcc_assert (name);
13571 add_AT_string (tmpl_die, DW_AT_name, name);
13572 }
13573
13574 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13575 {
13576 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13577 TMPL_DIE should have a child DW_AT_type attribute that is set
13578 to the type of the argument to PARM, which is ARG.
13579 If PARM is a type generic parameter, TMPL_DIE should have a
13580 child DW_AT_type that is set to ARG. */
13581 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13582 add_type_attribute (tmpl_die, tmpl_type,
13583 (TREE_THIS_VOLATILE (tmpl_type)
13584 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13585 false, parent_die);
13586 }
13587 else
13588 {
13589 /* So TMPL_DIE is a DIE representing a generic generic
13590 parameter, a.k.a. a template template parameter in C++,
13591 and ARG is a template. */
13592
13593 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13594 to the name of the argument. */
13595 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13596 if (name)
13597 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13598 }
13599
13600 if (TREE_CODE (parm) == PARM_DECL)
13601 /* So PARM is a non-type generic parameter.
13602 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13603 attribute of TMPL_DIE whose value represents the value
13604 of ARG.
13605 We must be careful here:
13606 The value of ARG might reference some function decls.
13607 We might currently be emitting debug info for a generic
13608 type, and since types are emitted before function decls, we
13609 don't know whether the function decls referenced by ARG will
13610 actually be emitted after cgraph computations.
13611 So we must defer the generation of the DW_AT_const_value to
13612 after cgraph is ready. */
13613 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13614 }
13615
13616 return tmpl_die;
13617 }
13618
13619 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13620 PARM_PACK. PARM_PACK must be a template parameter pack. The returned DIE
13621 will be a child DIE of PARENT_DIE. */
13622
13623 static dw_die_ref
13624 template_parameter_pack_die (tree parm_pack,
13625 tree parm_pack_args,
13626 dw_die_ref parent_die)
13627 {
13628 dw_die_ref die;
13629 int j;
13630
13631 gcc_assert (parent_die && parm_pack);
13632
13633 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13634 add_name_and_src_coords_attributes (die, parm_pack);
13635 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13636 generic_parameter_die (parm_pack,
13637 TREE_VEC_ELT (parm_pack_args, j),
13638 false /* Don't emit DW_AT_name */,
13639 die);
13640 return die;
13641 }
13642
13643 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13644 an enumerated type. */
13645
13646 static inline int
13647 type_is_enum (const_tree type)
13648 {
13649 return TREE_CODE (type) == ENUMERAL_TYPE;
13650 }
13651
13652 /* Return the DBX register number described by a given RTL node. */
13653
13654 static unsigned int
13655 dbx_reg_number (const_rtx rtl)
13656 {
13657 unsigned regno = REGNO (rtl);
13658
13659 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13660
13661 #ifdef LEAF_REG_REMAP
13662 if (crtl->uses_only_leaf_regs)
13663 {
13664 int leaf_reg = LEAF_REG_REMAP (regno);
13665 if (leaf_reg != -1)
13666 regno = (unsigned) leaf_reg;
13667 }
13668 #endif
13669
13670 regno = DBX_REGISTER_NUMBER (regno);
13671 gcc_assert (regno != INVALID_REGNUM);
13672 return regno;
13673 }
13674
13675 /* Optionally add a DW_OP_piece term to a location description expression.
13676 DW_OP_piece is only added if the location description expression does
13677 not already end with DW_OP_piece. */
13678
13679 static void
13680 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13681 {
13682 dw_loc_descr_ref loc;
13683
13684 if (*list_head != NULL)
13685 {
13686 /* Find the end of the chain. */
13687 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13688 ;
13689
13690 if (loc->dw_loc_opc != DW_OP_piece)
13691 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13692 }
13693 }
13694
13695 /* Return a location descriptor that designates a machine register or
13696 zero if there is none. */
13697
13698 static dw_loc_descr_ref
13699 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13700 {
13701 rtx regs;
13702
13703 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13704 return 0;
13705
13706 /* We only use "frame base" when we're sure we're talking about the
13707 post-prologue local stack frame. We do this by *not* running
13708 register elimination until this point, and recognizing the special
13709 argument pointer and soft frame pointer rtx's.
13710 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13711 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13712 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13713 {
13714 dw_loc_descr_ref result = NULL;
13715
13716 if (dwarf_version >= 4 || !dwarf_strict)
13717 {
13718 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13719 initialized);
13720 if (result)
13721 add_loc_descr (&result,
13722 new_loc_descr (DW_OP_stack_value, 0, 0));
13723 }
13724 return result;
13725 }
13726
13727 regs = targetm.dwarf_register_span (rtl);
13728
13729 if (REG_NREGS (rtl) > 1 || regs)
13730 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13731 else
13732 {
13733 unsigned int dbx_regnum = dbx_reg_number (rtl);
13734 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13735 return 0;
13736 return one_reg_loc_descriptor (dbx_regnum, initialized);
13737 }
13738 }
13739
13740 /* Return a location descriptor that designates a machine register for
13741 a given hard register number. */
13742
13743 static dw_loc_descr_ref
13744 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13745 {
13746 dw_loc_descr_ref reg_loc_descr;
13747
13748 if (regno <= 31)
13749 reg_loc_descr
13750 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13751 else
13752 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13753
13754 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13755 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13756
13757 return reg_loc_descr;
13758 }
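/* For example, hard register 5 is described by the single byte DW_OP_reg5,
   while a register whose DWARF number is 40 needs the two-byte sequence
   DW_OP_regx, ULEB128 40.  */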
13759
13760 /* Given an RTL of a register, return a location descriptor that
13761 designates a value that spans more than one register. */
13762
13763 static dw_loc_descr_ref
13764 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13765 enum var_init_status initialized)
13766 {
13767 int size, i;
13768 dw_loc_descr_ref loc_result = NULL;
13769
13770 /* Simple, contiguous registers. */
13771 if (regs == NULL_RTX)
13772 {
13773 unsigned reg = REGNO (rtl);
13774 int nregs;
13775
13776 #ifdef LEAF_REG_REMAP
13777 if (crtl->uses_only_leaf_regs)
13778 {
13779 int leaf_reg = LEAF_REG_REMAP (reg);
13780 if (leaf_reg != -1)
13781 reg = (unsigned) leaf_reg;
13782 }
13783 #endif
13784
13785 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13786 nregs = REG_NREGS (rtl);
13787
13788 /* At present we only track constant-sized pieces. */
13789 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13790 return NULL;
13791 size /= nregs;
13792
13793 loc_result = NULL;
13794 while (nregs--)
13795 {
13796 dw_loc_descr_ref t;
13797
13798 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13799 VAR_INIT_STATUS_INITIALIZED);
13800 add_loc_descr (&loc_result, t);
13801 add_loc_descr_op_piece (&loc_result, size);
13802 ++reg;
13803 }
13804 return loc_result;
13805 }
13806
13807 /* Now onto stupid register sets in non contiguous locations. */
13808
13809 gcc_assert (GET_CODE (regs) == PARALLEL);
13810
13811 /* At present we only track constant-sized pieces. */
13812 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13813 return NULL;
13814 loc_result = NULL;
13815
13816 for (i = 0; i < XVECLEN (regs, 0); ++i)
13817 {
13818 dw_loc_descr_ref t;
13819
13820 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13821 VAR_INIT_STATUS_INITIALIZED);
13822 add_loc_descr (&loc_result, t);
13823 add_loc_descr_op_piece (&loc_result, size);
13824 }
13825
13826 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13827 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13828 return loc_result;
13829 }
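/* As an illustration of the piece lists built above: a 16-byte value
   living in two consecutive 8-byte registers r and r+1 comes out as
   DW_OP_reg<r>, DW_OP_piece 8, DW_OP_reg<r+1>, DW_OP_piece 8; the
   PARALLEL case is identical except that each register comes from the
   target-provided span rather than being consecutive.  */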
13830
13831 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13832
13833 /* Return a location descriptor that designates a constant i,
13834 as a compound operation from constant (i >> shift), constant shift
13835 and DW_OP_shl. */
13836
13837 static dw_loc_descr_ref
13838 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13839 {
13840 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13841 add_loc_descr (&ret, int_loc_descriptor (shift));
13842 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13843 return ret;
13844 }
13845
13846 /* Return a location descriptor that designates constant POLY_I. */
13847
13848 static dw_loc_descr_ref
13849 int_loc_descriptor (poly_int64 poly_i)
13850 {
13851 enum dwarf_location_atom op;
13852
13853 HOST_WIDE_INT i;
13854 if (!poly_i.is_constant (&i))
13855 {
13856 /* Create location descriptions for the non-constant part and
13857 add any constant offset at the end. */
13858 dw_loc_descr_ref ret = NULL;
13859 HOST_WIDE_INT constant = poly_i.coeffs[0];
13860 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13861 {
13862 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13863 if (coeff != 0)
13864 {
13865 dw_loc_descr_ref start = ret;
13866 unsigned int factor;
13867 int bias;
13868 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13869 (j, &factor, &bias);
13870
13871 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13872 add COEFF * (REGNO / FACTOR) now and subtract
13873 COEFF * BIAS from the final constant part. */
13874 constant -= coeff * bias;
13875 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13876 if (coeff % factor == 0)
13877 coeff /= factor;
13878 else
13879 {
13880 int amount = exact_log2 (factor);
13881 gcc_assert (amount >= 0);
13882 add_loc_descr (&ret, int_loc_descriptor (amount));
13883 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13884 }
13885 if (coeff != 1)
13886 {
13887 add_loc_descr (&ret, int_loc_descriptor (coeff));
13888 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13889 }
13890 if (start)
13891 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13892 }
13893 }
13894 loc_descr_plus_const (&ret, constant);
13895 return ret;
13896 }
13897
13898 /* Pick the smallest representation of a constant, rather than just
13899 defaulting to the LEB encoding. */
13900 if (i >= 0)
13901 {
13902 int clz = clz_hwi (i);
13903 int ctz = ctz_hwi (i);
13904 if (i <= 31)
13905 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13906 else if (i <= 0xff)
13907 op = DW_OP_const1u;
13908 else if (i <= 0xffff)
13909 op = DW_OP_const2u;
13910 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13911 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13912 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13913 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13914 while DW_OP_const4u is 5 bytes. */
13915 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13916 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13917 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13918 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13919 while DW_OP_const4u is 5 bytes. */
13920 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13921
13922 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13923 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13924 <= 4)
13925 {
13926 /* As i >= 2**31, the double cast above will yield a negative number.
13927 Since wrapping is defined in DWARF expressions we can output big
13928 positive integers as small negative ones, regardless of the size
13929 of host wide ints.
13930
13931 Here, since the evaluator will handle 32-bit values and since i >=
13932 2**31, we know it's going to be interpreted as a negative literal:
13933 store it this way if we can do better than 5 bytes this way. */
13934 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13935 }
13936 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13937 op = DW_OP_const4u;
13938
13939 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13940 least 6 bytes: see if we can do better before falling back to it. */
13941 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13942 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13943 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13944 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13945 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13946 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13947 >= HOST_BITS_PER_WIDE_INT)
13948 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13949 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13950 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13951 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13952 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13953 && size_of_uleb128 (i) > 6)
13954 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13955 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13956 else
13957 op = DW_OP_constu;
13958 }
13959 else
13960 {
13961 if (i >= -0x80)
13962 op = DW_OP_const1s;
13963 else if (i >= -0x8000)
13964 op = DW_OP_const2s;
13965 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13966 {
13967 if (size_of_int_loc_descriptor (i) < 5)
13968 {
13969 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13970 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13971 return ret;
13972 }
13973 op = DW_OP_const4s;
13974 }
13975 else
13976 {
13977 if (size_of_int_loc_descriptor (i)
13978 < (unsigned long) 1 + size_of_sleb128 (i))
13979 {
13980 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13981 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13982 return ret;
13983 }
13984 op = DW_OP_consts;
13985 }
13986 }
13987
13988 return new_loc_descr (op, i, 0);
13989 }
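/* A worked example of the shift trick above: for i == 0x80000000 with a
   64-bit HOST_WIDE_INT, clz == 32 and ctz == 31, so the value is emitted
   as DW_OP_lit16, DW_OP_lit27, DW_OP_shl (3 bytes, since
   16 << 27 == 0x80000000) instead of the 5-byte DW_OP_const4u.  */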
13990
13991 /* Likewise, for unsigned constants. */
13992
13993 static dw_loc_descr_ref
13994 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13995 {
13996 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13997 const unsigned HOST_WIDE_INT max_uint
13998 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13999
14000 /* If possible, use the clever signed constants handling. */
14001 if (i <= max_int)
14002 return int_loc_descriptor ((HOST_WIDE_INT) i);
14003
14004 /* Here, we are left with positive numbers that cannot be represented as
14005 HOST_WIDE_INT, i.e.:
14006 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14007
14008 Using a DW_OP_const4u/const8u operation to encode them consumes a lot of
14009 bytes, whereas it may be better to output a negative integer: thanks to integer
14010 wrapping, we know that:
14011 x = x - 2 ** DWARF2_ADDR_SIZE
14012 = x - 2 * (max (HOST_WIDE_INT) + 1)
14013 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
14014 small negative integers. Let's try that in cases it will clearly improve
14015 the encoding: there is no gain turning DW_OP_const4u into
14016 DW_OP_const4s. */
14017 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14018 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14019 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14020 {
14021 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14022
14023 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14024 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14025 const HOST_WIDE_INT second_shift
14026 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14027
14028 /* So we finally have:
14029 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14030 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14031 return int_loc_descriptor (second_shift);
14032 }
14033
14034 /* Last chance: fall back to a simple constant operation. */
14035 return new_loc_descr
14036 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14037 ? DW_OP_const4u
14038 : DW_OP_const8u,
14039 i, 0);
14040 }
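/* Worked example of the wrapping trick above, assuming an 8-byte address
   size and a 64-bit HOST_WIDE_INT: for i == 0xffffffffffffffff, first_shift
   is 0x7fffffffffffffff and second_shift is -1, so the value is emitted as
   the 2-byte DW_OP_const1s -1 instead of the 9-byte DW_OP_const8u, relying
   on the evaluator's modulo 2^64 arithmetic.  */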
14041
14042 /* Generate and return a location description that computes the unsigned
14043 comparison of the two stack top entries (a OP b where b is the top-most
14044 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14045 LE_EXPR, GT_EXPR or GE_EXPR. */
14046
14047 static dw_loc_descr_ref
14048 uint_comparison_loc_list (enum tree_code kind)
14049 {
14050 enum dwarf_location_atom op, flip_op;
14051 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14052
14053 switch (kind)
14054 {
14055 case LT_EXPR:
14056 op = DW_OP_lt;
14057 break;
14058 case LE_EXPR:
14059 op = DW_OP_le;
14060 break;
14061 case GT_EXPR:
14062 op = DW_OP_gt;
14063 break;
14064 case GE_EXPR:
14065 op = DW_OP_ge;
14066 break;
14067 default:
14068 gcc_unreachable ();
14069 }
14070
14071 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14072 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14073
14074 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14075 possible to perform unsigned comparisons: we just have to distinguish
14076 three cases:
14077
14078 1. when a and b have the same sign (as signed integers); then we should
14079 return: a OP(signed) b;
14080
14081 2. when a is a negative signed integer while b is a positive one, then a
14082 is a greater unsigned integer than b; likewise when a and b's roles
14083 are flipped.
14084
14085 So first, compare the sign of the two operands. */
14086 ret = new_loc_descr (DW_OP_over, 0, 0);
14087 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14088 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14089 /* If they have different signs (i.e. they have different sign bits), then
14090 the stack top value now has the sign bit set and thus it's smaller than
14091 zero. */
14092 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14093 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14094 add_loc_descr (&ret, bra_node);
14095
14096 /* We are in case 1. At this point, we know both operands have the same
14097 sign, so it's safe to use the built-in signed comparison. */
14098 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14099 add_loc_descr (&ret, jmp_node);
14100
14101 /* We are in case 2. Here, we know both operands do not have the same sign,
14102 so we have to flip the signed comparison. */
14103 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14104 tmp = new_loc_descr (flip_op, 0, 0);
14105 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14106 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14107 add_loc_descr (&ret, tmp);
14108
14109 /* This dummy operation is necessary to make the two branches join. */
14110 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14111 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14112 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14113 add_loc_descr (&ret, tmp);
14114
14115 return ret;
14116 }
14117
14118 /* Likewise, but takes the location description lists (might be destructive on
14119 them). Return NULL if either is NULL or if concatenation fails. */
14120
14121 static dw_loc_list_ref
14122 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14123 enum tree_code kind)
14124 {
14125 if (left == NULL || right == NULL)
14126 return NULL;
14127
14128 add_loc_list (&left, right);
14129 if (left == NULL)
14130 return NULL;
14131
14132 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14133 return left;
14134 }
14135
14136 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14137 without actually allocating it. */
14138
14139 static unsigned long
14140 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14141 {
14142 return size_of_int_loc_descriptor (i >> shift)
14143 + size_of_int_loc_descriptor (shift)
14144 + 1;
14145 }
14146
14147 /* Return size_of_locs (int_loc_descriptor (i)) without
14148 actually allocating it. */
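/* For instance, on a 64-bit host this yields 1 for 0..31 (DW_OP_lit<n>),
   2 for 0xff (DW_OP_const1u), 3 for 0xffff (DW_OP_const2u) and 5 for
   0x12345678 (DW_OP_const4u), while a value such as HOST_WIDE_INT_1 << 60
   is costed via the shift-based form, which is shorter than DW_OP_const8u.  */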
14149
14150 static unsigned long
14151 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14152 {
14153 unsigned long s;
14154
14155 if (i >= 0)
14156 {
14157 int clz, ctz;
14158 if (i <= 31)
14159 return 1;
14160 else if (i <= 0xff)
14161 return 2;
14162 else if (i <= 0xffff)
14163 return 3;
14164 clz = clz_hwi (i);
14165 ctz = ctz_hwi (i);
14166 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14167 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14168 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14169 - clz - 5);
14170 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14171 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14172 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14173 - clz - 8);
14174 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14175 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14176 <= 4)
14177 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14178 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14179 return 5;
14180 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14181 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14182 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14183 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14184 - clz - 8);
14185 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14186 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14187 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14188 - clz - 16);
14189 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14190 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14191 && s > 6)
14192 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14193 - clz - 32);
14194 else
14195 return 1 + s;
14196 }
14197 else
14198 {
14199 if (i >= -0x80)
14200 return 2;
14201 else if (i >= -0x8000)
14202 return 3;
14203 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14204 {
14205 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14206 {
14207 s = size_of_int_loc_descriptor (-i) + 1;
14208 if (s < 5)
14209 return s;
14210 }
14211 return 5;
14212 }
14213 else
14214 {
14215 unsigned long r = 1 + size_of_sleb128 (i);
14216 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14217 {
14218 s = size_of_int_loc_descriptor (-i) + 1;
14219 if (s < r)
14220 return s;
14221 }
14222 return r;
14223 }
14224 }
14225 }
14226
14227 /* Return a loc description representing the "address" of an integer value.
14228 This can appear only as a toplevel expression. */
14229
14230 static dw_loc_descr_ref
14231 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14232 {
14233 int litsize;
14234 dw_loc_descr_ref loc_result = NULL;
14235
14236 if (!(dwarf_version >= 4 || !dwarf_strict))
14237 return NULL;
14238
14239 litsize = size_of_int_loc_descriptor (i);
14240 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14241 is more compact. For DW_OP_stack_value we need:
14242 litsize + 1 (DW_OP_stack_value)
14243 and for DW_OP_implicit_value:
14244 1 (DW_OP_implicit_value) + 1 (length) + size. */
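/* For instance, assuming DWARF2_ADDR_SIZE == 8, size == 8 and i == 5,
   litsize is 1 (DW_OP_lit5), so the 2-byte DW_OP_lit5 DW_OP_stack_value
   form is preferred over the 10-byte DW_OP_implicit_value form.  */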
14245 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14246 {
14247 loc_result = int_loc_descriptor (i);
14248 add_loc_descr (&loc_result,
14249 new_loc_descr (DW_OP_stack_value, 0, 0));
14250 return loc_result;
14251 }
14252
14253 loc_result = new_loc_descr (DW_OP_implicit_value,
14254 size, 0);
14255 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14256 loc_result->dw_loc_oprnd2.v.val_int = i;
14257 return loc_result;
14258 }
14259
14260 /* Return a location descriptor that designates a base+offset location. */
14261
14262 static dw_loc_descr_ref
14263 based_loc_descr (rtx reg, poly_int64 offset,
14264 enum var_init_status initialized)
14265 {
14266 unsigned int regno;
14267 dw_loc_descr_ref result;
14268 dw_fde_ref fde = cfun->fde;
14269
14270 /* We only use "frame base" when we're sure we're talking about the
14271 post-prologue local stack frame. We do this by *not* running
14272 register elimination until this point, and recognizing the special
14273 argument pointer and soft frame pointer rtx's. */
14274 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14275 {
14276 rtx elim = (ira_use_lra_p
14277 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14278 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14279
14280 if (elim != reg)
14281 {
14282 elim = strip_offset_and_add (elim, &offset);
14283 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
14284 && (elim == hard_frame_pointer_rtx
14285 || elim == stack_pointer_rtx))
14286 || elim == (frame_pointer_needed
14287 ? hard_frame_pointer_rtx
14288 : stack_pointer_rtx));
14289
14290 /* If drap register is used to align stack, use frame
14291 pointer + offset to access stack variables. If stack
14292 is aligned without drap, use stack pointer + offset to
14293 access stack variables. */
14294 if (crtl->stack_realign_tried
14295 && reg == frame_pointer_rtx)
14296 {
14297 int base_reg
14298 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14299 ? HARD_FRAME_POINTER_REGNUM
14300 : REGNO (elim));
14301 return new_reg_loc_descr (base_reg, offset);
14302 }
14303
14304 gcc_assert (frame_pointer_fb_offset_valid);
14305 offset += frame_pointer_fb_offset;
14306 HOST_WIDE_INT const_offset;
14307 if (offset.is_constant (&const_offset))
14308 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14309 else
14310 {
14311 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14312 loc_descr_plus_const (&ret, offset);
14313 return ret;
14314 }
14315 }
14316 }
14317
14318 regno = REGNO (reg);
14319 #ifdef LEAF_REG_REMAP
14320 if (crtl->uses_only_leaf_regs)
14321 {
14322 int leaf_reg = LEAF_REG_REMAP (regno);
14323 if (leaf_reg != -1)
14324 regno = (unsigned) leaf_reg;
14325 }
14326 #endif
14327 regno = DWARF_FRAME_REGNUM (regno);
14328
14329 HOST_WIDE_INT const_offset;
14330 if (!optimize && fde
14331 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14332 && offset.is_constant (&const_offset))
14333 {
14334 /* Use cfa+offset to represent the location of arguments passed
14335 on the stack when drap is used to align stack.
14336 Only do this when not optimizing; for optimized code var-tracking
14337 is supposed to track where the arguments live, and the register
14338 used as vdrap or drap in some spot might be used for something
14339 else in another part of the routine. */
14340 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14341 }
14342
14343 result = new_reg_loc_descr (regno, offset);
14344
14345 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14346 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14347
14348 return result;
14349 }
14350
14351 /* Return true if this RTL expression describes a base+offset calculation. */
14352
14353 static inline int
14354 is_based_loc (const_rtx rtl)
14355 {
14356 return (GET_CODE (rtl) == PLUS
14357 && ((REG_P (XEXP (rtl, 0))
14358 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14359 && CONST_INT_P (XEXP (rtl, 1)))));
14360 }
14361
14362 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14363 failed. */
14364
14365 static dw_loc_descr_ref
14366 tls_mem_loc_descriptor (rtx mem)
14367 {
14368 tree base;
14369 dw_loc_descr_ref loc_result;
14370
14371 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14372 return NULL;
14373
14374 base = get_base_address (MEM_EXPR (mem));
14375 if (base == NULL
14376 || !VAR_P (base)
14377 || !DECL_THREAD_LOCAL_P (base))
14378 return NULL;
14379
14380 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14381 if (loc_result == NULL)
14382 return NULL;
14383
14384 if (maybe_ne (MEM_OFFSET (mem), 0))
14385 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14386
14387 return loc_result;
14388 }
14389
14390 /* Output debug info about the reason why we failed to expand an expression
14391 as a dwarf expression. */
14392
14393 static void
14394 expansion_failed (tree expr, rtx rtl, char const *reason)
14395 {
14396 if (dump_file && (dump_flags & TDF_DETAILS))
14397 {
14398 fprintf (dump_file, "Failed to expand as dwarf: ");
14399 if (expr)
14400 print_generic_expr (dump_file, expr, dump_flags);
14401 if (rtl)
14402 {
14403 fprintf (dump_file, "\n");
14404 print_rtl (dump_file, rtl);
14405 }
14406 fprintf (dump_file, "\nReason: %s\n", reason);
14407 }
14408 }
14409
14410 /* Helper function for const_ok_for_output. */
14411
14412 static bool
14413 const_ok_for_output_1 (rtx rtl)
14414 {
14415 if (targetm.const_not_ok_for_debug_p (rtl))
14416 {
14417 if (GET_CODE (rtl) != UNSPEC)
14418 {
14419 expansion_failed (NULL_TREE, rtl,
14420 "Expression rejected for debug by the backend.\n");
14421 return false;
14422 }
14423
14424 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14425 the target hook doesn't explicitly allow it in debug info, assume
14426 we can't express it in the debug info. */
14427 /* Don't complain about TLS UNSPECs, those are just too hard to
14428 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14429 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14430 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14431 if (flag_checking
14432 && (XVECLEN (rtl, 0) == 0
14433 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14434 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14435 inform (current_function_decl
14436 ? DECL_SOURCE_LOCATION (current_function_decl)
14437 : UNKNOWN_LOCATION,
14438 #if NUM_UNSPEC_VALUES > 0
14439 "non-delegitimized UNSPEC %s (%d) found in variable location",
14440 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14441 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14442 XINT (rtl, 1));
14443 #else
14444 "non-delegitimized UNSPEC %d found in variable location",
14445 XINT (rtl, 1));
14446 #endif
14447 expansion_failed (NULL_TREE, rtl,
14448 "UNSPEC hasn't been delegitimized.\n");
14449 return false;
14450 }
14451
14452 if (CONST_POLY_INT_P (rtl))
14453 return false;
14454
14455 if (targetm.const_not_ok_for_debug_p (rtl))
14456 {
14457 expansion_failed (NULL_TREE, rtl,
14458 "Expression rejected for debug by the backend.\n");
14459 return false;
14460 }
14461
14462 /* FIXME: Refer to PR60655. It is possible for simplification
14463 of rtl expressions in var tracking to produce such expressions.
14464 We should really identify / validate expressions
14465 enclosed in CONST that can be handled by assemblers on various
14466 targets and only handle legitimate cases here. */
14467 switch (GET_CODE (rtl))
14468 {
14469 case SYMBOL_REF:
14470 break;
14471 case NOT:
14472 case NEG:
14473 return false;
14474 default:
14475 return true;
14476 }
14477
14478 if (CONSTANT_POOL_ADDRESS_P (rtl))
14479 {
14480 bool marked;
14481 get_pool_constant_mark (rtl, &marked);
14482 /* If all references to this pool constant were optimized away,
14483 it was not output and thus we can't represent it. */
14484 if (!marked)
14485 {
14486 expansion_failed (NULL_TREE, rtl,
14487 "Constant was removed from constant pool.\n");
14488 return false;
14489 }
14490 }
14491
14492 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14493 return false;
14494
14495 /* Avoid references to external symbols in debug info: on several targets
14496 the linker might even refuse to link when linking a shared library,
14497 and in many other cases the relocations for .debug_info/.debug_loc are
14498 dropped, so the address becomes zero anyway. Hidden symbols, which are
14499 guaranteed to be defined within the same shared library or executable, are fine. */
14500 if (SYMBOL_REF_EXTERNAL_P (rtl))
14501 {
14502 tree decl = SYMBOL_REF_DECL (rtl);
14503
14504 if (decl == NULL || !targetm.binds_local_p (decl))
14505 {
14506 expansion_failed (NULL_TREE, rtl,
14507 "Symbol not defined in current TU.\n");
14508 return false;
14509 }
14510 }
14511
14512 return true;
14513 }
14514
14515 /* Return true if constant RTL can be emitted in DW_OP_addr or
14516 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14517 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14518
14519 static bool
14520 const_ok_for_output (rtx rtl)
14521 {
14522 if (GET_CODE (rtl) == SYMBOL_REF)
14523 return const_ok_for_output_1 (rtl);
14524
14525 if (GET_CODE (rtl) == CONST)
14526 {
14527 subrtx_var_iterator::array_type array;
14528 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14529 if (!const_ok_for_output_1 (*iter))
14530 return false;
14531 return true;
14532 }
14533
14534 return true;
14535 }
14536
14537 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14538 if possible, NULL otherwise. */
14539
14540 static dw_die_ref
14541 base_type_for_mode (machine_mode mode, bool unsignedp)
14542 {
14543 dw_die_ref type_die;
14544 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14545
14546 if (type == NULL)
14547 return NULL;
14548 switch (TREE_CODE (type))
14549 {
14550 case INTEGER_TYPE:
14551 case REAL_TYPE:
14552 break;
14553 default:
14554 return NULL;
14555 }
14556 type_die = lookup_type_die (type);
14557 if (!type_die)
14558 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14559 comp_unit_die ());
14560 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14561 return NULL;
14562 return type_die;
14563 }
14564
14565 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14566 type matching MODE, or, if MODE is narrower than or as wide as
14567 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14568 possible. */
14569
14570 static dw_loc_descr_ref
14571 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14572 {
14573 machine_mode outer_mode = mode;
14574 dw_die_ref type_die;
14575 dw_loc_descr_ref cvt;
14576
14577 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14578 {
14579 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14580 return op;
14581 }
14582 type_die = base_type_for_mode (outer_mode, 1);
14583 if (type_die == NULL)
14584 return NULL;
14585 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14586 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14587 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14588 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14589 add_loc_descr (&op, cvt);
14590 return op;
14591 }
14592
14593 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14594
14595 static dw_loc_descr_ref
14596 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14597 dw_loc_descr_ref op1)
14598 {
14599 dw_loc_descr_ref ret = op0;
14600 add_loc_descr (&ret, op1);
14601 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14602 if (STORE_FLAG_VALUE != 1)
14603 {
14604 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14605 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14606 }
14607 return ret;
14608 }
14609
14610 /* Subroutine of scompare_loc_descriptor for the case in which we're
14611 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14612 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14613
14614 static dw_loc_descr_ref
14615 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14616 scalar_int_mode op_mode,
14617 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14618 {
14619 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14620 dw_loc_descr_ref cvt;
14621
14622 if (type_die == NULL)
14623 return NULL;
14624 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14625 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14626 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14627 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14628 add_loc_descr (&op0, cvt);
14629 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14630 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14631 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14632 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14633 add_loc_descr (&op1, cvt);
14634 return compare_loc_descriptor (op, op0, op1);
14635 }
14636
14637 /* Subroutine of scompare_loc_descriptor for the case in which we're
14638 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14639 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14640
14641 static dw_loc_descr_ref
14642 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14643 scalar_int_mode op_mode,
14644 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14645 {
14646 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14647 /* For eq/ne, if the operands are known to be zero-extended,
14648 there is no need to do the fancy shifting up. */
14649 if (op == DW_OP_eq || op == DW_OP_ne)
14650 {
14651 dw_loc_descr_ref last0, last1;
14652 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14653 ;
14654 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14655 ;
14656 /* deref_size zero extends, and for constants we can check
14657 whether they are zero extended or not. */
14658 if (((last0->dw_loc_opc == DW_OP_deref_size
14659 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14660 || (CONST_INT_P (XEXP (rtl, 0))
14661 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14662 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14663 && ((last1->dw_loc_opc == DW_OP_deref_size
14664 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14665 || (CONST_INT_P (XEXP (rtl, 1))
14666 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14667 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14668 return compare_loc_descriptor (op, op0, op1);
14669
14670 /* EQ/NE comparison against a constant in a narrower type than
14671 DWARF2_ADDR_SIZE can be performed either as
14672 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14673 DW_OP_{eq,ne}
14674 or
14675 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14676 DW_OP_{eq,ne}. Pick whatever is shorter. */
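/* For instance, for an EQ of a QImode value against 5 on a 64-bit target,
   shift is 56, so the shl form needs the large constant 5 << 56, whereas
   the mask form is just DW_OP_const1u 0xff DW_OP_and DW_OP_lit5 and is
   therefore the one picked here.  */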
14677 if (CONST_INT_P (XEXP (rtl, 1))
14678 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14679 && (size_of_int_loc_descriptor (shift) + 1
14680 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14681 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14682 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14683 & GET_MODE_MASK (op_mode))))
14684 {
14685 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14686 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14687 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14688 & GET_MODE_MASK (op_mode));
14689 return compare_loc_descriptor (op, op0, op1);
14690 }
14691 }
14692 add_loc_descr (&op0, int_loc_descriptor (shift));
14693 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14694 if (CONST_INT_P (XEXP (rtl, 1)))
14695 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14696 else
14697 {
14698 add_loc_descr (&op1, int_loc_descriptor (shift));
14699 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14700 }
14701 return compare_loc_descriptor (op, op0, op1);
14702 }
14703
14704 /* Return location descriptor for signed comparison OP RTL. */
14705
14706 static dw_loc_descr_ref
14707 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14708 machine_mode mem_mode)
14709 {
14710 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14711 dw_loc_descr_ref op0, op1;
14712
14713 if (op_mode == VOIDmode)
14714 op_mode = GET_MODE (XEXP (rtl, 1));
14715 if (op_mode == VOIDmode)
14716 return NULL;
14717
14718 scalar_int_mode int_op_mode;
14719 if (dwarf_strict
14720 && dwarf_version < 5
14721 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14722 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14723 return NULL;
14724
14725 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14726 VAR_INIT_STATUS_INITIALIZED);
14727 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14728 VAR_INIT_STATUS_INITIALIZED);
14729
14730 if (op0 == NULL || op1 == NULL)
14731 return NULL;
14732
14733 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14734 {
14735 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14736 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14737
14738 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14739 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14740 }
14741 return compare_loc_descriptor (op, op0, op1);
14742 }
14743
14744 /* Return location descriptor for unsigned comparison OP RTL. */
14745
14746 static dw_loc_descr_ref
14747 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14748 machine_mode mem_mode)
14749 {
14750 dw_loc_descr_ref op0, op1;
14751
14752 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14753 if (test_op_mode == VOIDmode)
14754 test_op_mode = GET_MODE (XEXP (rtl, 1));
14755
14756 scalar_int_mode op_mode;
14757 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14758 return NULL;
14759
14760 if (dwarf_strict
14761 && dwarf_version < 5
14762 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14763 return NULL;
14764
14765 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14766 VAR_INIT_STATUS_INITIALIZED);
14767 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14768 VAR_INIT_STATUS_INITIALIZED);
14769
14770 if (op0 == NULL || op1 == NULL)
14771 return NULL;
14772
14773 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14774 {
14775 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14776 dw_loc_descr_ref last0, last1;
14777 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14778 ;
14779 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14780 ;
14781 if (CONST_INT_P (XEXP (rtl, 0)))
14782 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14783 /* deref_size zero extends, so no need to mask it again. */
14784 else if (last0->dw_loc_opc != DW_OP_deref_size
14785 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14786 {
14787 add_loc_descr (&op0, int_loc_descriptor (mask));
14788 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14789 }
14790 if (CONST_INT_P (XEXP (rtl, 1)))
14791 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14792 /* deref_size zero extends, so no need to mask it again. */
14793 else if (last1->dw_loc_opc != DW_OP_deref_size
14794 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14795 {
14796 add_loc_descr (&op1, int_loc_descriptor (mask));
14797 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14798 }
14799 }
14800 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14801 {
14802 HOST_WIDE_INT bias = 1;
14803 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14804 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14805 if (CONST_INT_P (XEXP (rtl, 1)))
14806 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14807 + INTVAL (XEXP (rtl, 1)));
14808 else
14809 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14810 bias, 0));
14811 }
14812 return compare_loc_descriptor (op, op0, op1);
14813 }
14814
14815 /* Return location descriptor for {U,S}{MIN,MAX}. */
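/* Schematically, for SMIN (a, b) the expression built below is:
     a DW_OP_dup b DW_OP_swap DW_OP_over DW_OP_lt DW_OP_bra <L>
     DW_OP_swap L: DW_OP_drop
   which leaves the smaller value on the stack; the MAX variants use
   DW_OP_gt instead, and the unsigned, narrow and wide cases first insert
   the bias, shift or convert operations handled below.  */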
14816
14817 static dw_loc_descr_ref
14818 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14819 machine_mode mem_mode)
14820 {
14821 enum dwarf_location_atom op;
14822 dw_loc_descr_ref op0, op1, ret;
14823 dw_loc_descr_ref bra_node, drop_node;
14824
14825 scalar_int_mode int_mode;
14826 if (dwarf_strict
14827 && dwarf_version < 5
14828 && (!is_a <scalar_int_mode> (mode, &int_mode)
14829 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14830 return NULL;
14831
14832 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14833 VAR_INIT_STATUS_INITIALIZED);
14834 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14835 VAR_INIT_STATUS_INITIALIZED);
14836
14837 if (op0 == NULL || op1 == NULL)
14838 return NULL;
14839
14840 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14841 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14842 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14843 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14844 {
14845 /* Checked by the caller. */
14846 int_mode = as_a <scalar_int_mode> (mode);
14847 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14848 {
14849 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14850 add_loc_descr (&op0, int_loc_descriptor (mask));
14851 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14852 add_loc_descr (&op1, int_loc_descriptor (mask));
14853 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14854 }
14855 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14856 {
14857 HOST_WIDE_INT bias = 1;
14858 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14859 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14860 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14861 }
14862 }
14863 else if (is_a <scalar_int_mode> (mode, &int_mode)
14864 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14865 {
14866 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14867 add_loc_descr (&op0, int_loc_descriptor (shift));
14868 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14869 add_loc_descr (&op1, int_loc_descriptor (shift));
14870 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14871 }
14872 else if (is_a <scalar_int_mode> (mode, &int_mode)
14873 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14874 {
14875 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14876 dw_loc_descr_ref cvt;
14877 if (type_die == NULL)
14878 return NULL;
14879 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14880 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14881 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14882 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14883 add_loc_descr (&op0, cvt);
14884 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14885 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14886 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14887 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14888 add_loc_descr (&op1, cvt);
14889 }
14890
14891 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14892 op = DW_OP_lt;
14893 else
14894 op = DW_OP_gt;
14895 ret = op0;
14896 add_loc_descr (&ret, op1);
14897 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14898 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14899 add_loc_descr (&ret, bra_node);
14900 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14901 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14902 add_loc_descr (&ret, drop_node);
14903 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14904 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14905 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14906 && is_a <scalar_int_mode> (mode, &int_mode)
14907 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14908 ret = convert_descriptor_to_mode (int_mode, ret);
14909 return ret;
14910 }
14911
14912 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
14913 after converting both arguments to TYPE_DIE, then convert the result back
14914 to an untyped (unsigned) value. */
14915
14916 static dw_loc_descr_ref
14917 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14918 scalar_int_mode mode, machine_mode mem_mode)
14919 {
14920 dw_loc_descr_ref cvt, op0, op1;
14921
14922 if (type_die == NULL)
14923 return NULL;
14924 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14925 VAR_INIT_STATUS_INITIALIZED);
14926 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14927 VAR_INIT_STATUS_INITIALIZED);
14928 if (op0 == NULL || op1 == NULL)
14929 return NULL;
14930 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14931 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14932 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14933 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14934 add_loc_descr (&op0, cvt);
14935 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14936 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14937 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14938 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14939 add_loc_descr (&op1, cvt);
14940 add_loc_descr (&op0, op1);
14941 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14942 return convert_descriptor_to_mode (mode, op0);
14943 }
14944
14945 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14946 const0 is DW_OP_lit0 or corresponding typed constant,
14947 const1 is DW_OP_lit1 or corresponding typed constant
14948 and constMSB is constant with just the MSB bit set
14949 for the mode):
14950 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14951 L1: const0 DW_OP_swap
14952 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14953 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14954 L3: DW_OP_drop
14955 L4: DW_OP_nop
14956
14957 CTZ is similar:
14958 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14959 L1: const0 DW_OP_swap
14960 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14961 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14962 L3: DW_OP_drop
14963 L4: DW_OP_nop
14964
14965 FFS is similar:
14966 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14967 L1: const1 DW_OP_swap
14968 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14969 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14970 L3: DW_OP_drop
14971 L4: DW_OP_nop */
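/* As a rough sanity check of the CLZ expansion above: for an 8-bit input
   of 0x10, the L2 loop shifts the value left and bumps the counter three
   times before the constMSB test succeeds, so the expression leaves 3 on
   the stack, matching the expected count of leading zero bits.  */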
14972
14973 static dw_loc_descr_ref
14974 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14975 machine_mode mem_mode)
14976 {
14977 dw_loc_descr_ref op0, ret, tmp;
14978 HOST_WIDE_INT valv;
14979 dw_loc_descr_ref l1jump, l1label;
14980 dw_loc_descr_ref l2jump, l2label;
14981 dw_loc_descr_ref l3jump, l3label;
14982 dw_loc_descr_ref l4jump, l4label;
14983 rtx msb;
14984
14985 if (GET_MODE (XEXP (rtl, 0)) != mode)
14986 return NULL;
14987
14988 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14989 VAR_INIT_STATUS_INITIALIZED);
14990 if (op0 == NULL)
14991 return NULL;
14992 ret = op0;
14993 if (GET_CODE (rtl) == CLZ)
14994 {
14995 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14996 valv = GET_MODE_BITSIZE (mode);
14997 }
14998 else if (GET_CODE (rtl) == FFS)
14999 valv = 0;
15000 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15001 valv = GET_MODE_BITSIZE (mode);
15002 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15003 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15004 add_loc_descr (&ret, l1jump);
15005 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15006 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15007 VAR_INIT_STATUS_INITIALIZED);
15008 if (tmp == NULL)
15009 return NULL;
15010 add_loc_descr (&ret, tmp);
15011 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15012 add_loc_descr (&ret, l4jump);
15013 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15014 ? const1_rtx : const0_rtx,
15015 mode, mem_mode,
15016 VAR_INIT_STATUS_INITIALIZED);
15017 if (l1label == NULL)
15018 return NULL;
15019 add_loc_descr (&ret, l1label);
15020 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15021 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15022 add_loc_descr (&ret, l2label);
15023 if (GET_CODE (rtl) != CLZ)
15024 msb = const1_rtx;
15025 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15026 msb = GEN_INT (HOST_WIDE_INT_1U
15027 << (GET_MODE_BITSIZE (mode) - 1));
15028 else
15029 msb = immed_wide_int_const
15030 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15031 GET_MODE_PRECISION (mode)), mode);
15032 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15033 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15034 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15035 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15036 else
15037 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15038 VAR_INIT_STATUS_INITIALIZED);
15039 if (tmp == NULL)
15040 return NULL;
15041 add_loc_descr (&ret, tmp);
15042 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15043 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15044 add_loc_descr (&ret, l3jump);
15045 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15046 VAR_INIT_STATUS_INITIALIZED);
15047 if (tmp == NULL)
15048 return NULL;
15049 add_loc_descr (&ret, tmp);
15050 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15051 ? DW_OP_shl : DW_OP_shr, 0, 0));
15052 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15053 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15054 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15055 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15056 add_loc_descr (&ret, l2jump);
15057 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15058 add_loc_descr (&ret, l3label);
15059 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15060 add_loc_descr (&ret, l4label);
15061 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15062 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15063 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15064 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15065 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15066 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15067 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15068 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15069 return ret;
15070 }
15071
15072 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15073 const1 is DW_OP_lit1 or corresponding typed constant):
15074 const0 DW_OP_swap
15075 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15076 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15077 L2: DW_OP_drop
15078
15079 PARITY is similar:
15080 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15081 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15082 L2: DW_OP_drop */
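/* For instance, an 8-bit input of 0x15 (three bits set) iterates the L1
   loop until the value reaches zero, folding the low bit into the
   accumulator each round: POPCOUNT leaves 3 on the stack, and the PARITY
   variant (DW_OP_xor instead of DW_OP_plus) leaves 1.  */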
15083
15084 static dw_loc_descr_ref
15085 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15086 machine_mode mem_mode)
15087 {
15088 dw_loc_descr_ref op0, ret, tmp;
15089 dw_loc_descr_ref l1jump, l1label;
15090 dw_loc_descr_ref l2jump, l2label;
15091
15092 if (GET_MODE (XEXP (rtl, 0)) != mode)
15093 return NULL;
15094
15095 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15096 VAR_INIT_STATUS_INITIALIZED);
15097 if (op0 == NULL)
15098 return NULL;
15099 ret = op0;
15100 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15101 VAR_INIT_STATUS_INITIALIZED);
15102 if (tmp == NULL)
15103 return NULL;
15104 add_loc_descr (&ret, tmp);
15105 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15106 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15107 add_loc_descr (&ret, l1label);
15108 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15109 add_loc_descr (&ret, l2jump);
15110 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15111 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15112 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15113 VAR_INIT_STATUS_INITIALIZED);
15114 if (tmp == NULL)
15115 return NULL;
15116 add_loc_descr (&ret, tmp);
15117 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15118 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15119 ? DW_OP_plus : DW_OP_xor, 0, 0));
15120 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15121 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15122 VAR_INIT_STATUS_INITIALIZED);
15123 add_loc_descr (&ret, tmp);
15124 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15125 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15126 add_loc_descr (&ret, l1jump);
15127 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15128 add_loc_descr (&ret, l2label);
15129 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15130 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15131 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15132 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15133 return ret;
15134 }
15135
15136 /* BSWAP (constS is initial shift count, either 56 or 24):
15137 constS const0
15138 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15139 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15140 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15141 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15142 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
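/* For instance, with a 32-bit mode (constS == 24), an input of 0x11223344
   is reassembled one byte per iteration as the shift count steps through
   24, 16, 8 and 0, leaving 0x44332211 on the stack.  */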
15143
15144 static dw_loc_descr_ref
15145 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15146 machine_mode mem_mode)
15147 {
15148 dw_loc_descr_ref op0, ret, tmp;
15149 dw_loc_descr_ref l1jump, l1label;
15150 dw_loc_descr_ref l2jump, l2label;
15151
15152 if (BITS_PER_UNIT != 8
15153 || (GET_MODE_BITSIZE (mode) != 32
15154 && GET_MODE_BITSIZE (mode) != 64))
15155 return NULL;
15156
15157 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15158 VAR_INIT_STATUS_INITIALIZED);
15159 if (op0 == NULL)
15160 return NULL;
15161
15162 ret = op0;
15163 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15164 mode, mem_mode,
15165 VAR_INIT_STATUS_INITIALIZED);
15166 if (tmp == NULL)
15167 return NULL;
15168 add_loc_descr (&ret, tmp);
15169 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15170 VAR_INIT_STATUS_INITIALIZED);
15171 if (tmp == NULL)
15172 return NULL;
15173 add_loc_descr (&ret, tmp);
15174 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15175 add_loc_descr (&ret, l1label);
15176 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15177 mode, mem_mode,
15178 VAR_INIT_STATUS_INITIALIZED);
15179 add_loc_descr (&ret, tmp);
15180 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15181 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15182 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15183 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15184 VAR_INIT_STATUS_INITIALIZED);
15185 if (tmp == NULL)
15186 return NULL;
15187 add_loc_descr (&ret, tmp);
15188 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15189 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15190 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15191 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15192 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15193 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15194 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15195 VAR_INIT_STATUS_INITIALIZED);
15196 add_loc_descr (&ret, tmp);
15197 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15198 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15199 add_loc_descr (&ret, l2jump);
15200 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15201 VAR_INIT_STATUS_INITIALIZED);
15202 add_loc_descr (&ret, tmp);
15203 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15204 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15205 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15206 add_loc_descr (&ret, l1jump);
15207 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15208 add_loc_descr (&ret, l2label);
15209 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15210 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15211 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15212 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15213 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15214 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15215 return ret;
15216 }
15217
15218 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15219 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15220 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15221 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15222
15223 ROTATERT is similar:
15224 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15225 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15226 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
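/* For instance, an 8-bit ROTATE (rotate left) of 0xb1 by 3 yields 0x8d:
   the DW_OP_shl leg contributes 0x88 after masking with 0xff, the
   DW_OP_shr leg contributes 0x05, and DW_OP_or combines them.  */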
15227
15228 static dw_loc_descr_ref
15229 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15230 machine_mode mem_mode)
15231 {
15232 rtx rtlop1 = XEXP (rtl, 1);
15233 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15234 int i;
15235
15236 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15237 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15238 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15239 VAR_INIT_STATUS_INITIALIZED);
15240 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15241 VAR_INIT_STATUS_INITIALIZED);
15242 if (op0 == NULL || op1 == NULL)
15243 return NULL;
15244 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15245 for (i = 0; i < 2; i++)
15246 {
15247 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15248 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15249 mode, mem_mode,
15250 VAR_INIT_STATUS_INITIALIZED);
15251 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15252 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15253 ? DW_OP_const4u
15254 : HOST_BITS_PER_WIDE_INT == 64
15255 ? DW_OP_const8u : DW_OP_constu,
15256 GET_MODE_MASK (mode), 0);
15257 else
15258 mask[i] = NULL;
15259 if (mask[i] == NULL)
15260 return NULL;
15261 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15262 }
15263 ret = op0;
15264 add_loc_descr (&ret, op1);
15265 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15266 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15267 if (GET_CODE (rtl) == ROTATERT)
15268 {
15269 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15270 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15271 GET_MODE_BITSIZE (mode), 0));
15272 }
15273 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15274 if (mask[0] != NULL)
15275 add_loc_descr (&ret, mask[0]);
15276 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15277 if (mask[1] != NULL)
15278 {
15279 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15280 add_loc_descr (&ret, mask[1]);
15281 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15282 }
15283 if (GET_CODE (rtl) == ROTATE)
15284 {
15285 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15286 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15287 GET_MODE_BITSIZE (mode), 0));
15288 }
15289 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15290 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15291 return ret;
15292 }
15293
15294 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15295 for DEBUG_PARAMETER_REF RTL. */
15296
15297 static dw_loc_descr_ref
15298 parameter_ref_descriptor (rtx rtl)
15299 {
15300 dw_loc_descr_ref ret;
15301 dw_die_ref ref;
15302
15303 if (dwarf_strict)
15304 return NULL;
15305 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15306 /* With LTO during LTRANS we get the late DIE that refers to the early
15307 DIE, thus we add another indirection here. This seems to confuse
15308 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15309 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15310 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15311 if (ref)
15312 {
15313 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15314 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15315 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15316 }
15317 else
15318 {
15319 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15320 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15321 }
15322 return ret;
15323 }
15324
15325 /* The following routine converts the RTL for a variable or parameter
15326 (resident in memory) into an equivalent Dwarf representation of a
15327 mechanism for getting the address of that same variable onto the top of a
15328 hypothetical "address evaluation" stack.
15329
15330 When creating memory location descriptors, we are effectively transforming
15331 the RTL for a memory-resident object into its Dwarf postfix expression
15332 equivalent. This routine recursively descends an RTL tree, turning
15333 it into Dwarf postfix code as it goes.
15334
15335 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15336
15337 MEM_MODE is the mode of the memory reference, needed to handle some
15338 autoincrement addressing modes.
15339
15340 Return 0 if we can't represent the location. */
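/* For instance, RTL such as (plus (reg fp) (const_int 8)) is typically
   turned into a single DW_OP_fbreg 8 (or DW_OP_breg<n> 8) via
   based_loc_descr, while wrapping it in a MEM additionally appends a
   DW_OP_deref in the MEM case below.  */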
15341
15342 dw_loc_descr_ref
15343 mem_loc_descriptor (rtx rtl, machine_mode mode,
15344 machine_mode mem_mode,
15345 enum var_init_status initialized)
15346 {
15347 dw_loc_descr_ref mem_loc_result = NULL;
15348 enum dwarf_location_atom op;
15349 dw_loc_descr_ref op0, op1;
15350 rtx inner = NULL_RTX;
15351 poly_int64 offset;
15352
15353 if (mode == VOIDmode)
15354 mode = GET_MODE (rtl);
15355
15356 /* Note that for a dynamically sized array, the location we will generate a
15357 description of here will be the lowest numbered location which is
15358 actually within the array. That's *not* necessarily the same as the
15359 zeroth element of the array. */
15360
15361 rtl = targetm.delegitimize_address (rtl);
15362
15363 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15364 return NULL;
15365
15366 scalar_int_mode int_mode, inner_mode, op1_mode;
15367 switch (GET_CODE (rtl))
15368 {
15369 case POST_INC:
15370 case POST_DEC:
15371 case POST_MODIFY:
15372 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15373
15374 case SUBREG:
15375 /* The case of a subreg may arise when we have a local (register)
15376 variable or a formal (register) parameter which doesn't quite fill
15377 up an entire register. For now, just assume that it is
15378 legitimate to make the Dwarf info refer to the whole register which
15379 contains the given subreg. */
15380 if (!subreg_lowpart_p (rtl))
15381 break;
15382 inner = SUBREG_REG (rtl);
15383 /* FALLTHRU */
15384 case TRUNCATE:
15385 if (inner == NULL_RTX)
15386 inner = XEXP (rtl, 0);
15387 if (is_a <scalar_int_mode> (mode, &int_mode)
15388 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15389 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15390 #ifdef POINTERS_EXTEND_UNSIGNED
15391 || (int_mode == Pmode && mem_mode != VOIDmode)
15392 #endif
15393 )
15394 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15395 {
15396 mem_loc_result = mem_loc_descriptor (inner,
15397 inner_mode,
15398 mem_mode, initialized);
15399 break;
15400 }
15401 if (dwarf_strict && dwarf_version < 5)
15402 break;
15403 if (is_a <scalar_int_mode> (mode, &int_mode)
15404 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15405 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15406 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15407 {
15408 dw_die_ref type_die;
15409 dw_loc_descr_ref cvt;
15410
15411 mem_loc_result = mem_loc_descriptor (inner,
15412 GET_MODE (inner),
15413 mem_mode, initialized);
15414 if (mem_loc_result == NULL)
15415 break;
15416 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15417 if (type_die == NULL)
15418 {
15419 mem_loc_result = NULL;
15420 break;
15421 }
15422 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15423 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15424 else
15425 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15426 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15427 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15428 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15429 add_loc_descr (&mem_loc_result, cvt);
15430 if (is_a <scalar_int_mode> (mode, &int_mode)
15431 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15432 {
15433 /* Convert it to untyped afterwards. */
15434 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15435 add_loc_descr (&mem_loc_result, cvt);
15436 }
15437 }
15438 break;
15439
15440 case REG:
15441 if (!is_a <scalar_int_mode> (mode, &int_mode)
15442 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15443 && rtl != arg_pointer_rtx
15444 && rtl != frame_pointer_rtx
15445 #ifdef POINTERS_EXTEND_UNSIGNED
15446 && (int_mode != Pmode || mem_mode == VOIDmode)
15447 #endif
15448 ))
15449 {
15450 dw_die_ref type_die;
15451 unsigned int dbx_regnum;
15452
15453 if (dwarf_strict && dwarf_version < 5)
15454 break;
15455 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
15456 break;
15457 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15458 if (type_die == NULL)
15459 break;
15460
15461 dbx_regnum = dbx_reg_number (rtl);
15462 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15463 break;
15464 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15465 dbx_regnum, 0);
15466 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15467 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15468 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15469 break;
15470 }
15471 /* Whenever a register number forms a part of the description of the
15472 method for calculating the (dynamic) address of a memory resident
15473 object, DWARF rules require the register number be referred to as
15474 a "base register". This distinction is not based in any way upon
15475 what category of register the hardware believes the given register
15476 belongs to. This is strictly DWARF terminology we're dealing with
15477 here. Note that in cases where the location of a memory-resident
15478 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15479 OP_CONST (0)) the actual DWARF location descriptor that we generate
15480 may just be OP_BASEREG (basereg). This may look deceptively like
15481 the object in question was allocated to a register (rather than in
15482 memory) so DWARF consumers need to be aware of the subtle
15483 distinction between OP_REG and OP_BASEREG. */
15484 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15485 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15486 else if (stack_realign_drap
15487 && crtl->drap_reg
15488 && crtl->args.internal_arg_pointer == rtl
15489 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15490 {
15491 /* If RTL is internal_arg_pointer, which has been optimized
15492 out, use DRAP instead. */
15493 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15494 VAR_INIT_STATUS_INITIALIZED);
15495 }
15496 break;
15497
15498 case SIGN_EXTEND:
15499 case ZERO_EXTEND:
15500 if (!is_a <scalar_int_mode> (mode, &int_mode)
15501 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15502 break;
15503 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15504 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15505 if (op0 == 0)
15506 break;
15507 else if (GET_CODE (rtl) == ZERO_EXTEND
15508 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15509 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15510 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15511 to expand zero extend as two shifts instead of
15512 masking. */
15513 && GET_MODE_SIZE (inner_mode) <= 4)
15514 {
15515 mem_loc_result = op0;
15516 add_loc_descr (&mem_loc_result,
15517 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15518 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15519 }
15520 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15521 {
15522 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15523 shift *= BITS_PER_UNIT;
15524 if (GET_CODE (rtl) == SIGN_EXTEND)
15525 op = DW_OP_shra;
15526 else
15527 op = DW_OP_shr;
15528 mem_loc_result = op0;
15529 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15530 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15531 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15532 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15533 }
15534 else if (!dwarf_strict || dwarf_version >= 5)
15535 {
15536 dw_die_ref type_die1, type_die2;
15537 dw_loc_descr_ref cvt;
15538
15539 type_die1 = base_type_for_mode (inner_mode,
15540 GET_CODE (rtl) == ZERO_EXTEND);
15541 if (type_die1 == NULL)
15542 break;
15543 type_die2 = base_type_for_mode (int_mode, 1);
15544 if (type_die2 == NULL)
15545 break;
15546 mem_loc_result = op0;
15547 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15548 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15549 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15550 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15551 add_loc_descr (&mem_loc_result, cvt);
15552 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15553 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15554 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15555 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15556 add_loc_descr (&mem_loc_result, cvt);
15557 }
15558 break;
15559
15560 case MEM:
15561 {
15562 rtx new_rtl = avoid_constant_pool_reference (rtl);
15563 if (new_rtl != rtl)
15564 {
15565 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15566 initialized);
15567 if (mem_loc_result != NULL)
15568 return mem_loc_result;
15569 }
15570 }
15571 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15572 get_address_mode (rtl), mode,
15573 VAR_INIT_STATUS_INITIALIZED);
15574 if (mem_loc_result == NULL)
15575 mem_loc_result = tls_mem_loc_descriptor (rtl);
15576 if (mem_loc_result != NULL)
15577 {
15578 if (!is_a <scalar_int_mode> (mode, &int_mode)
15579 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15580 {
15581 dw_die_ref type_die;
15582 dw_loc_descr_ref deref;
15583 HOST_WIDE_INT size;
15584
15585 if (dwarf_strict && dwarf_version < 5)
15586 return NULL;
15587 if (!GET_MODE_SIZE (mode).is_constant (&size))
15588 return NULL;
15589 type_die
15590 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15591 if (type_die == NULL)
15592 return NULL;
15593 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15594 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15595 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15596 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15597 add_loc_descr (&mem_loc_result, deref);
15598 }
15599 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15600 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15601 else
15602 add_loc_descr (&mem_loc_result,
15603 new_loc_descr (DW_OP_deref_size,
15604 GET_MODE_SIZE (int_mode), 0));
15605 }
15606 break;
15607
15608 case LO_SUM:
15609 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15610
15611 case LABEL_REF:
15612 /* Some ports can transform a symbol ref into a label ref, because
15613 the symbol ref is too far away and has to be dumped into a constant
15614 pool. */
15615 case CONST:
15616 case SYMBOL_REF:
15617 if (!is_a <scalar_int_mode> (mode, &int_mode)
15618 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15619 #ifdef POINTERS_EXTEND_UNSIGNED
15620 && (int_mode != Pmode || mem_mode == VOIDmode)
15621 #endif
15622 ))
15623 break;
15624 if (GET_CODE (rtl) == SYMBOL_REF
15625 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15626 {
15627 dw_loc_descr_ref temp;
15628
15629 /* If this is not defined, we have no way to emit the data. */
15630 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15631 break;
15632
15633 temp = new_addr_loc_descr (rtl, dtprel_true);
15634
15635 /* We check for DWARF 5 here because gdb did not implement
15636 DW_OP_form_tls_address until after 7.12. */
15637 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15638 ? DW_OP_form_tls_address
15639 : DW_OP_GNU_push_tls_address),
15640 0, 0);
15641 add_loc_descr (&mem_loc_result, temp);
15642
15643 break;
15644 }
15645
15646 if (!const_ok_for_output (rtl))
15647 {
15648 if (GET_CODE (rtl) == CONST)
15649 switch (GET_CODE (XEXP (rtl, 0)))
15650 {
15651 case NOT:
15652 op = DW_OP_not;
15653 goto try_const_unop;
15654 case NEG:
15655 op = DW_OP_neg;
15656 goto try_const_unop;
15657 try_const_unop:
15658 rtx arg;
15659 arg = XEXP (XEXP (rtl, 0), 0);
15660 if (!CONSTANT_P (arg))
15661 arg = gen_rtx_CONST (int_mode, arg);
15662 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15663 initialized);
15664 if (op0)
15665 {
15666 mem_loc_result = op0;
15667 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15668 }
15669 break;
15670 default:
15671 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15672 mem_mode, initialized);
15673 break;
15674 }
15675 break;
15676 }
15677
15678 symref:
15679 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15680 vec_safe_push (used_rtx_array, rtl);
15681 break;
15682
15683 case CONCAT:
15684 case CONCATN:
15685 case VAR_LOCATION:
15686 case DEBUG_IMPLICIT_PTR:
15687 expansion_failed (NULL_TREE, rtl,
15688 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15689 return 0;
15690
15691 case ENTRY_VALUE:
15692 if (dwarf_strict && dwarf_version < 5)
15693 return NULL;
15694 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15695 {
15696 if (!is_a <scalar_int_mode> (mode, &int_mode)
15697 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15698 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15699 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15700 else
15701 {
15702 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15703 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15704 return NULL;
15705 op0 = one_reg_loc_descriptor (dbx_regnum,
15706 VAR_INIT_STATUS_INITIALIZED);
15707 }
15708 }
15709 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15710 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15711 {
15712 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15713 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15714 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15715 return NULL;
15716 }
15717 else
15718 gcc_unreachable ();
15719 if (op0 == NULL)
15720 return NULL;
15721 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15722 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15723 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15724 break;
15725
15726 case DEBUG_PARAMETER_REF:
15727 mem_loc_result = parameter_ref_descriptor (rtl);
15728 break;
15729
15730 case PRE_MODIFY:
15731 /* Extract the PLUS expression nested inside and fall into
15732 PLUS code below. */
15733 rtl = XEXP (rtl, 1);
15734 goto plus;
15735
15736 case PRE_INC:
15737 case PRE_DEC:
15738 /* Turn these into a PLUS expression and fall into the PLUS code
15739 below. */
15740 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15741 gen_int_mode (GET_CODE (rtl) == PRE_INC
15742 ? GET_MODE_UNIT_SIZE (mem_mode)
15743 : -GET_MODE_UNIT_SIZE (mem_mode),
15744 mode));
15745
15746 /* fall through */
15747
15748 case PLUS:
15749 plus:
15750 if (is_based_loc (rtl)
15751 && is_a <scalar_int_mode> (mode, &int_mode)
15752 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15753 || XEXP (rtl, 0) == arg_pointer_rtx
15754 || XEXP (rtl, 0) == frame_pointer_rtx))
15755 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15756 INTVAL (XEXP (rtl, 1)),
15757 VAR_INIT_STATUS_INITIALIZED);
15758 else
15759 {
15760 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15761 VAR_INIT_STATUS_INITIALIZED);
15762 if (mem_loc_result == 0)
15763 break;
15764
15765 if (CONST_INT_P (XEXP (rtl, 1))
15766 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15767 <= DWARF2_ADDR_SIZE))
15768 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15769 else
15770 {
15771 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15772 VAR_INIT_STATUS_INITIALIZED);
15773 if (op1 == 0)
15774 return NULL;
15775 add_loc_descr (&mem_loc_result, op1);
15776 add_loc_descr (&mem_loc_result,
15777 new_loc_descr (DW_OP_plus, 0, 0));
15778 }
15779 }
15780 break;
15781
15782 /* If a pseudo-reg is optimized away, it is possible for it to
15783 be replaced with a MEM containing a multiply or shift. */
15784 case MINUS:
15785 op = DW_OP_minus;
15786 goto do_binop;
15787
15788 case MULT:
15789 op = DW_OP_mul;
15790 goto do_binop;
15791
15792 case DIV:
15793 if ((!dwarf_strict || dwarf_version >= 5)
15794 && is_a <scalar_int_mode> (mode, &int_mode)
15795 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15796 {
15797 mem_loc_result = typed_binop (DW_OP_div, rtl,
15798 base_type_for_mode (mode, 0),
15799 int_mode, mem_mode);
15800 break;
15801 }
15802 op = DW_OP_div;
15803 goto do_binop;
15804
15805 case UMOD:
15806 op = DW_OP_mod;
15807 goto do_binop;
15808
15809 case ASHIFT:
15810 op = DW_OP_shl;
15811 goto do_shift;
15812
15813 case ASHIFTRT:
15814 op = DW_OP_shra;
15815 goto do_shift;
15816
15817 case LSHIFTRT:
15818 op = DW_OP_shr;
15819 goto do_shift;
15820
15821 do_shift:
15822 if (!is_a <scalar_int_mode> (mode, &int_mode))
15823 break;
15824 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15825 VAR_INIT_STATUS_INITIALIZED);
15826 {
15827 rtx rtlop1 = XEXP (rtl, 1);
15828 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15829 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15830 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15831 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15832 VAR_INIT_STATUS_INITIALIZED);
15833 }
15834
15835 if (op0 == 0 || op1 == 0)
15836 break;
15837
15838 mem_loc_result = op0;
15839 add_loc_descr (&mem_loc_result, op1);
15840 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15841 break;
15842
15843 case AND:
15844 op = DW_OP_and;
15845 goto do_binop;
15846
15847 case IOR:
15848 op = DW_OP_or;
15849 goto do_binop;
15850
15851 case XOR:
15852 op = DW_OP_xor;
15853 goto do_binop;
15854
15855 do_binop:
15856 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15857 VAR_INIT_STATUS_INITIALIZED);
15858 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15859 VAR_INIT_STATUS_INITIALIZED);
15860
15861 if (op0 == 0 || op1 == 0)
15862 break;
15863
15864 mem_loc_result = op0;
15865 add_loc_descr (&mem_loc_result, op1);
15866 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15867 break;
15868
15869 case MOD:
15870 if ((!dwarf_strict || dwarf_version >= 5)
15871 && is_a <scalar_int_mode> (mode, &int_mode)
15872 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15873 {
15874 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15875 base_type_for_mode (mode, 0),
15876 int_mode, mem_mode);
15877 break;
15878 }
15879
15880 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15881 VAR_INIT_STATUS_INITIALIZED);
15882 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15883 VAR_INIT_STATUS_INITIALIZED);
15884
15885 if (op0 == 0 || op1 == 0)
15886 break;
15887
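/* DW_OP_mod is used for the unsigned modulus (UMOD) above, so build the
signed modulus here as op0 - (op0 / op1) * op1: the two DW_OP_over
operations duplicate both operands for the signed DW_OP_div below.  */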
15888 mem_loc_result = op0;
15889 add_loc_descr (&mem_loc_result, op1);
15890 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15891 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15892 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15893 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15894 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15895 break;
15896
15897 case UDIV:
15898 if ((!dwarf_strict || dwarf_version >= 5)
15899 && is_a <scalar_int_mode> (mode, &int_mode))
15900 {
15901 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15902 {
15903 op = DW_OP_div;
15904 goto do_binop;
15905 }
15906 mem_loc_result = typed_binop (DW_OP_div, rtl,
15907 base_type_for_mode (int_mode, 1),
15908 int_mode, mem_mode);
15909 }
15910 break;
15911
15912 case NOT:
15913 op = DW_OP_not;
15914 goto do_unop;
15915
15916 case ABS:
15917 op = DW_OP_abs;
15918 goto do_unop;
15919
15920 case NEG:
15921 op = DW_OP_neg;
15922 goto do_unop;
15923
15924 do_unop:
15925 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15926 VAR_INIT_STATUS_INITIALIZED);
15927
15928 if (op0 == 0)
15929 break;
15930
15931 mem_loc_result = op0;
15932 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15933 break;
15934
15935 case CONST_INT:
15936 if (!is_a <scalar_int_mode> (mode, &int_mode)
15937 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15938 #ifdef POINTERS_EXTEND_UNSIGNED
15939 || (int_mode == Pmode
15940 && mem_mode != VOIDmode
15941 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15942 #endif
15943 )
15944 {
15945 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15946 break;
15947 }
15948 if ((!dwarf_strict || dwarf_version >= 5)
15949 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15950 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15951 {
15952 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15953 scalar_int_mode amode;
15954 if (type_die == NULL)
15955 return NULL;
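/* A small non-negative constant that fits in an address-sized untyped
constant may be cheaper to emit as <untyped const> DW_OP_convert <type DIE>
than as DW_OP_const_type, which carries the constant inline; pick whichever
encoding is smaller.  */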
15956 if (INTVAL (rtl) >= 0
15957 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15958 .exists (&amode))
15959 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15960 /* const DW_OP_convert <XXX> vs.
15961 DW_OP_const_type <XXX, 1, const>. */
15962 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15963 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15964 {
15965 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15966 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15967 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15968 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15969 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15970 add_loc_descr (&mem_loc_result, op0);
15971 return mem_loc_result;
15972 }
15973 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15974 INTVAL (rtl));
15975 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15976 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15977 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15978 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
15979 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15980 else
15981 {
15982 mem_loc_result->dw_loc_oprnd2.val_class
15983 = dw_val_class_const_double;
15984 mem_loc_result->dw_loc_oprnd2.v.val_double
15985 = double_int::from_shwi (INTVAL (rtl));
15986 }
15987 }
15988 break;
15989
15990 case CONST_DOUBLE:
15991 if (!dwarf_strict || dwarf_version >= 5)
15992 {
15993 dw_die_ref type_die;
15994
15995 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15996 CONST_DOUBLE rtx could represent either a large integer
15997 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15998 the value is always a floating point constant.
15999
16000 When it is an integer, a CONST_DOUBLE is used whenever
16001 the constant requires 2 HWIs to be adequately represented.
16002 We output CONST_DOUBLEs as blocks. */
16003 if (mode == VOIDmode
16004 || (GET_MODE (rtl) == VOIDmode
16005 && maybe_ne (GET_MODE_BITSIZE (mode),
16006 HOST_BITS_PER_DOUBLE_INT)))
16007 break;
16008 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16009 if (type_die == NULL)
16010 return NULL;
16011 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16012 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16013 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16014 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16015 #if TARGET_SUPPORTS_WIDE_INT == 0
16016 if (!SCALAR_FLOAT_MODE_P (mode))
16017 {
16018 mem_loc_result->dw_loc_oprnd2.val_class
16019 = dw_val_class_const_double;
16020 mem_loc_result->dw_loc_oprnd2.v.val_double
16021 = rtx_to_double_int (rtl);
16022 }
16023 else
16024 #endif
16025 {
16026 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16027 unsigned int length = GET_MODE_SIZE (float_mode);
16028 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16029
16030 insert_float (rtl, array);
16031 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16032 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16033 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16034 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16035 }
16036 }
16037 break;
16038
16039 case CONST_WIDE_INT:
16040 if (!dwarf_strict || dwarf_version >= 5)
16041 {
16042 dw_die_ref type_die;
16043
16044 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16045 if (type_die == NULL)
16046 return NULL;
16047 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16048 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16049 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16050 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16051 mem_loc_result->dw_loc_oprnd2.val_class
16052 = dw_val_class_wide_int;
16053 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16054 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16055 }
16056 break;
16057
16058 case CONST_POLY_INT:
16059 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16060 break;
16061
16062 case EQ:
16063 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16064 break;
16065
16066 case GE:
16067 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16068 break;
16069
16070 case GT:
16071 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16072 break;
16073
16074 case LE:
16075 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16076 break;
16077
16078 case LT:
16079 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16080 break;
16081
16082 case NE:
16083 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16084 break;
16085
16086 case GEU:
16087 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16088 break;
16089
16090 case GTU:
16091 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16092 break;
16093
16094 case LEU:
16095 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16096 break;
16097
16098 case LTU:
16099 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16100 break;
16101
16102 case UMIN:
16103 case UMAX:
16104 if (!SCALAR_INT_MODE_P (mode))
16105 break;
16106 /* FALLTHRU */
16107 case SMIN:
16108 case SMAX:
16109 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16110 break;
16111
16112 case ZERO_EXTRACT:
16113 case SIGN_EXTRACT:
16114 if (CONST_INT_P (XEXP (rtl, 1))
16115 && CONST_INT_P (XEXP (rtl, 2))
16116 && is_a <scalar_int_mode> (mode, &int_mode)
16117 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16118 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16119 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16120 && ((unsigned) INTVAL (XEXP (rtl, 1))
16121 + (unsigned) INTVAL (XEXP (rtl, 2))
16122 <= GET_MODE_BITSIZE (int_mode)))
16123 {
16124 int shift, size;
16125 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16126 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16127 if (op0 == 0)
16128 break;
16129 if (GET_CODE (rtl) == SIGN_EXTRACT)
16130 op = DW_OP_shra;
16131 else
16132 op = DW_OP_shr;
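/* Isolate the field by shifting it to the most significant end of an
address-sized value and then shifting it back down, arithmetically
(DW_OP_shra) for SIGN_EXTRACT and logically (DW_OP_shr) for
ZERO_EXTRACT.  */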
16133 mem_loc_result = op0;
16134 size = INTVAL (XEXP (rtl, 1));
16135 shift = INTVAL (XEXP (rtl, 2));
16136 if (BITS_BIG_ENDIAN)
16137 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16138 if (shift + size != (int) DWARF2_ADDR_SIZE)
16139 {
16140 add_loc_descr (&mem_loc_result,
16141 int_loc_descriptor (DWARF2_ADDR_SIZE
16142 - shift - size));
16143 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16144 }
16145 if (size != (int) DWARF2_ADDR_SIZE)
16146 {
16147 add_loc_descr (&mem_loc_result,
16148 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16149 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16150 }
16151 }
16152 break;
16153
16154 case IF_THEN_ELSE:
16155 {
16156 dw_loc_descr_ref op2, bra_node, drop_node;
16157 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16158 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16159 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16160 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16161 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16162 VAR_INIT_STATUS_INITIALIZED);
16163 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16164 VAR_INIT_STATUS_INITIALIZED);
16165 if (op0 == NULL || op1 == NULL || op2 == NULL)
16166 break;
16167
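/* Emit <then> <else> <cond> DW_OP_bra: a nonzero condition branches past
DW_OP_swap straight to DW_OP_drop, discarding the <else> value; a zero
condition falls through to the swap/drop pair, which discards the <then>
value instead.  */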
16168 mem_loc_result = op1;
16169 add_loc_descr (&mem_loc_result, op2);
16170 add_loc_descr (&mem_loc_result, op0);
16171 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16172 add_loc_descr (&mem_loc_result, bra_node);
16173 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16174 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16175 add_loc_descr (&mem_loc_result, drop_node);
16176 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16177 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16178 }
16179 break;
16180
16181 case FLOAT_EXTEND:
16182 case FLOAT_TRUNCATE:
16183 case FLOAT:
16184 case UNSIGNED_FLOAT:
16185 case FIX:
16186 case UNSIGNED_FIX:
16187 if (!dwarf_strict || dwarf_version >= 5)
16188 {
16189 dw_die_ref type_die;
16190 dw_loc_descr_ref cvt;
16191
16192 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16193 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16194 if (op0 == NULL)
16195 break;
16196 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16197 && (GET_CODE (rtl) == FLOAT
16198 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16199 {
16200 type_die = base_type_for_mode (int_mode,
16201 GET_CODE (rtl) == UNSIGNED_FLOAT);
16202 if (type_die == NULL)
16203 break;
16204 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16205 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16206 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16207 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16208 add_loc_descr (&op0, cvt);
16209 }
16210 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16211 if (type_die == NULL)
16212 break;
16213 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16214 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16215 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16216 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16217 add_loc_descr (&op0, cvt);
16218 if (is_a <scalar_int_mode> (mode, &int_mode)
16219 && (GET_CODE (rtl) == FIX
16220 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16221 {
16222 op0 = convert_descriptor_to_mode (int_mode, op0);
16223 if (op0 == NULL)
16224 break;
16225 }
16226 mem_loc_result = op0;
16227 }
16228 break;
16229
16230 case CLZ:
16231 case CTZ:
16232 case FFS:
16233 if (is_a <scalar_int_mode> (mode, &int_mode))
16234 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16235 break;
16236
16237 case POPCOUNT:
16238 case PARITY:
16239 if (is_a <scalar_int_mode> (mode, &int_mode))
16240 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16241 break;
16242
16243 case BSWAP:
16244 if (is_a <scalar_int_mode> (mode, &int_mode))
16245 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16246 break;
16247
16248 case ROTATE:
16249 case ROTATERT:
16250 if (is_a <scalar_int_mode> (mode, &int_mode))
16251 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16252 break;
16253
16254 case COMPARE:
16255 /* In theory, we could implement the above. */
16256 /* DWARF cannot represent the unsigned compare operations
16257 natively. */
16258 case SS_MULT:
16259 case US_MULT:
16260 case SS_DIV:
16261 case US_DIV:
16262 case SS_PLUS:
16263 case US_PLUS:
16264 case SS_MINUS:
16265 case US_MINUS:
16266 case SS_NEG:
16267 case US_NEG:
16268 case SS_ABS:
16269 case SS_ASHIFT:
16270 case US_ASHIFT:
16271 case SS_TRUNCATE:
16272 case US_TRUNCATE:
16273 case UNORDERED:
16274 case ORDERED:
16275 case UNEQ:
16276 case UNGE:
16277 case UNGT:
16278 case UNLE:
16279 case UNLT:
16280 case LTGT:
16281 case FRACT_CONVERT:
16282 case UNSIGNED_FRACT_CONVERT:
16283 case SAT_FRACT:
16284 case UNSIGNED_SAT_FRACT:
16285 case SQRT:
16286 case ASM_OPERANDS:
16287 case VEC_MERGE:
16288 case VEC_SELECT:
16289 case VEC_CONCAT:
16290 case VEC_DUPLICATE:
16291 case VEC_SERIES:
16292 case UNSPEC:
16293 case HIGH:
16294 case FMA:
16295 case STRICT_LOW_PART:
16296 case CONST_VECTOR:
16297 case CONST_FIXED:
16298 case CLRSB:
16299 case CLOBBER:
16300 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16301 can't express it in the debug info. This can happen e.g. with some
16302 TLS UNSPECs. */
16303 break;
16304
16305 case CONST_STRING:
16306 resolve_one_addr (&rtl);
16307 goto symref;
16308
16309 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16310 the expression. An UNSPEC rtx represents a raw DWARF operation;
16311 new_loc_descr is called for it to build the operation directly.
16312 Otherwise mem_loc_descriptor is called recursively. */
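/* As a purely illustrative example, an UNSPEC whose XINT is
DW_OP_plus_uconst and whose operand vector holds (const_int 16) and
(const_int 0) is emitted directly as DW_OP_plus_uconst 16.  */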
16313 case PARALLEL:
16314 {
16315 int index = 0;
16316 dw_loc_descr_ref exp_result = NULL;
16317
16318 for (; index < XVECLEN (rtl, 0); index++)
16319 {
16320 rtx elem = XVECEXP (rtl, 0, index);
16321 if (GET_CODE (elem) == UNSPEC)
16322 {
16323 /* Each DWARF operation UNSPEC contains two operands; if
16324 one operand is not used for the operation, const0_rtx is
16325 passed. */
16326 gcc_assert (XVECLEN (elem, 0) == 2);
16327
16328 HOST_WIDE_INT dw_op = XINT (elem, 1);
16329 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16330 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16331 exp_result
16332 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16333 oprnd2);
16334 }
16335 else
16336 exp_result
16337 = mem_loc_descriptor (elem, mode, mem_mode,
16338 VAR_INIT_STATUS_INITIALIZED);
16339
16340 if (!mem_loc_result)
16341 mem_loc_result = exp_result;
16342 else
16343 add_loc_descr (&mem_loc_result, exp_result);
16344 }
16345
16346 break;
16347 }
16348
16349 default:
16350 if (flag_checking)
16351 {
16352 print_rtl (stderr, rtl);
16353 gcc_unreachable ();
16354 }
16355 break;
16356 }
16357
16358 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16359 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16360
16361 return mem_loc_result;
16362 }
16363
16364 /* Return a descriptor that describes the concatenation of two locations.
16365 This is typically a complex variable. */
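/* E.g. for a complex value split across two registers, X0 and X1 describe
the real and imaginary halves and each descriptor is followed by a
DW_OP_piece for its size.  */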
16366
16367 static dw_loc_descr_ref
16368 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16369 {
16370 /* At present we only track constant-sized pieces. */
16371 unsigned int size0, size1;
16372 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16373 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16374 return 0;
16375
16376 dw_loc_descr_ref cc_loc_result = NULL;
16377 dw_loc_descr_ref x0_ref
16378 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16379 dw_loc_descr_ref x1_ref
16380 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16381
16382 if (x0_ref == 0 || x1_ref == 0)
16383 return 0;
16384
16385 cc_loc_result = x0_ref;
16386 add_loc_descr_op_piece (&cc_loc_result, size0);
16387
16388 add_loc_descr (&cc_loc_result, x1_ref);
16389 add_loc_descr_op_piece (&cc_loc_result, size1);
16390
16391 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16392 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16393
16394 return cc_loc_result;
16395 }
16396
16397 /* Return a descriptor that describes the concatenation of N
16398 locations. */
16399
16400 static dw_loc_descr_ref
16401 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16402 {
16403 unsigned int i;
16404 dw_loc_descr_ref cc_loc_result = NULL;
16405 unsigned int n = XVECLEN (concatn, 0);
16406 unsigned int size;
16407
16408 for (i = 0; i < n; ++i)
16409 {
16410 dw_loc_descr_ref ref;
16411 rtx x = XVECEXP (concatn, 0, i);
16412
16413 /* At present we only track constant-sized pieces. */
16414 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16415 return NULL;
16416
16417 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16418 if (ref == NULL)
16419 return NULL;
16420
16421 add_loc_descr (&cc_loc_result, ref);
16422 add_loc_descr_op_piece (&cc_loc_result, size);
16423 }
16424
16425 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16426 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16427
16428 return cc_loc_result;
16429 }
16430
16431 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16432 for DEBUG_IMPLICIT_PTR RTL. */
16433
16434 static dw_loc_descr_ref
16435 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16436 {
16437 dw_loc_descr_ref ret;
16438 dw_die_ref ref;
16439
16440 if (dwarf_strict && dwarf_version < 5)
16441 return NULL;
16442 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16443 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16444 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16445 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16446 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16447 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16448 if (ref)
16449 {
16450 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16451 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16452 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16453 }
16454 else
16455 {
16456 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16457 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16458 }
16459 return ret;
16460 }
16461
16462 /* Output a proper Dwarf location descriptor for a variable or parameter
16463 which is either allocated in a register or in a memory location. For a
16464 register, we just generate an OP_REG and the register number. For a
16465 memory location we provide a Dwarf postfix expression describing how to
16466 generate the (dynamic) address of the object onto the address stack.
16467
16468 MODE is mode of the decl if this loc_descriptor is going to be used in
16469 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16470 allowed, VOIDmode otherwise.
16471
16472 If we don't know how to describe it, return 0. */
16473
16474 static dw_loc_descr_ref
16475 loc_descriptor (rtx rtl, machine_mode mode,
16476 enum var_init_status initialized)
16477 {
16478 dw_loc_descr_ref loc_result = NULL;
16479 scalar_int_mode int_mode;
16480
16481 switch (GET_CODE (rtl))
16482 {
16483 case SUBREG:
16484 /* The case of a subreg may arise when we have a local (register)
16485 variable or a formal (register) parameter which doesn't quite fill
16486 up an entire register. For now, just assume that it is
16487 legitimate to make the Dwarf info refer to the whole register which
16488 contains the given subreg. */
16489 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16490 loc_result = loc_descriptor (SUBREG_REG (rtl),
16491 GET_MODE (SUBREG_REG (rtl)), initialized);
16492 else
16493 goto do_default;
16494 break;
16495
16496 case REG:
16497 loc_result = reg_loc_descriptor (rtl, initialized);
16498 break;
16499
16500 case MEM:
16501 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16502 GET_MODE (rtl), initialized);
16503 if (loc_result == NULL)
16504 loc_result = tls_mem_loc_descriptor (rtl);
16505 if (loc_result == NULL)
16506 {
16507 rtx new_rtl = avoid_constant_pool_reference (rtl);
16508 if (new_rtl != rtl)
16509 loc_result = loc_descriptor (new_rtl, mode, initialized);
16510 }
16511 break;
16512
16513 case CONCAT:
16514 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16515 initialized);
16516 break;
16517
16518 case CONCATN:
16519 loc_result = concatn_loc_descriptor (rtl, initialized);
16520 break;
16521
16522 case VAR_LOCATION:
16523 /* Single part. */
16524 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16525 {
16526 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16527 if (GET_CODE (loc) == EXPR_LIST)
16528 loc = XEXP (loc, 0);
16529 loc_result = loc_descriptor (loc, mode, initialized);
16530 break;
16531 }
16532
16533 rtl = XEXP (rtl, 1);
16534 /* FALLTHRU */
16535
16536 case PARALLEL:
16537 {
16538 rtvec par_elems = XVEC (rtl, 0);
16539 int num_elem = GET_NUM_ELEM (par_elems);
16540 machine_mode mode;
16541 int i, size;
16542
16543 /* Create the first one, so we have something to add to. */
16544 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16545 VOIDmode, initialized);
16546 if (loc_result == NULL)
16547 return NULL;
16548 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16549 /* At present we only track constant-sized pieces. */
16550 if (!GET_MODE_SIZE (mode).is_constant (&size))
16551 return NULL;
16552 add_loc_descr_op_piece (&loc_result, size);
16553 for (i = 1; i < num_elem; i++)
16554 {
16555 dw_loc_descr_ref temp;
16556
16557 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16558 VOIDmode, initialized);
16559 if (temp == NULL)
16560 return NULL;
16561 add_loc_descr (&loc_result, temp);
16562 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16563 /* At present we only track constant-sized pieces. */
16564 if (!GET_MODE_SIZE (mode).is_constant (&size))
16565 return NULL;
16566 add_loc_descr_op_piece (&loc_result, size);
16567 }
16568 }
16569 break;
16570
16571 case CONST_INT:
16572 if (mode != VOIDmode && mode != BLKmode)
16573 {
16574 int_mode = as_a <scalar_int_mode> (mode);
16575 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16576 INTVAL (rtl));
16577 }
16578 break;
16579
16580 case CONST_DOUBLE:
16581 if (mode == VOIDmode)
16582 mode = GET_MODE (rtl);
16583
16584 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16585 {
16586 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16587
16588 /* Note that a CONST_DOUBLE rtx could represent either an integer
16589 or a floating-point constant. A CONST_DOUBLE is used whenever
16590 the constant requires more than one word in order to be
16591 adequately represented. We output CONST_DOUBLEs as blocks. */
16592 scalar_mode smode = as_a <scalar_mode> (mode);
16593 loc_result = new_loc_descr (DW_OP_implicit_value,
16594 GET_MODE_SIZE (smode), 0);
16595 #if TARGET_SUPPORTS_WIDE_INT == 0
16596 if (!SCALAR_FLOAT_MODE_P (smode))
16597 {
16598 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16599 loc_result->dw_loc_oprnd2.v.val_double
16600 = rtx_to_double_int (rtl);
16601 }
16602 else
16603 #endif
16604 {
16605 unsigned int length = GET_MODE_SIZE (smode);
16606 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16607
16608 insert_float (rtl, array);
16609 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16610 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16611 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16612 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16613 }
16614 }
16615 break;
16616
16617 case CONST_WIDE_INT:
16618 if (mode == VOIDmode)
16619 mode = GET_MODE (rtl);
16620
16621 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16622 {
16623 int_mode = as_a <scalar_int_mode> (mode);
16624 loc_result = new_loc_descr (DW_OP_implicit_value,
16625 GET_MODE_SIZE (int_mode), 0);
16626 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16627 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16628 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16629 }
16630 break;
16631
16632 case CONST_VECTOR:
16633 if (mode == VOIDmode)
16634 mode = GET_MODE (rtl);
16635
16636 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16637 {
16638 unsigned int length;
16639 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16640 return NULL;
16641
16642 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16643 unsigned char *array
16644 = ggc_vec_alloc<unsigned char> (length * elt_size);
16645 unsigned int i;
16646 unsigned char *p;
16647 machine_mode imode = GET_MODE_INNER (mode);
16648
16649 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16650 switch (GET_MODE_CLASS (mode))
16651 {
16652 case MODE_VECTOR_INT:
16653 for (i = 0, p = array; i < length; i++, p += elt_size)
16654 {
16655 rtx elt = CONST_VECTOR_ELT (rtl, i);
16656 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16657 }
16658 break;
16659
16660 case MODE_VECTOR_FLOAT:
16661 for (i = 0, p = array; i < length; i++, p += elt_size)
16662 {
16663 rtx elt = CONST_VECTOR_ELT (rtl, i);
16664 insert_float (elt, p);
16665 }
16666 break;
16667
16668 default:
16669 gcc_unreachable ();
16670 }
16671
16672 loc_result = new_loc_descr (DW_OP_implicit_value,
16673 length * elt_size, 0);
16674 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16675 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16676 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16677 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16678 }
16679 break;
16680
16681 case CONST:
16682 if (mode == VOIDmode
16683 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16684 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16685 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16686 {
16687 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16688 break;
16689 }
16690 /* FALLTHROUGH */
16691 case SYMBOL_REF:
16692 if (!const_ok_for_output (rtl))
16693 break;
16694 /* FALLTHROUGH */
16695 case LABEL_REF:
16696 if (is_a <scalar_int_mode> (mode, &int_mode)
16697 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16698 && (dwarf_version >= 4 || !dwarf_strict))
16699 {
16700 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16701 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16702 vec_safe_push (used_rtx_array, rtl);
16703 }
16704 break;
16705
16706 case DEBUG_IMPLICIT_PTR:
16707 loc_result = implicit_ptr_descriptor (rtl, 0);
16708 break;
16709
16710 case PLUS:
16711 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16712 && CONST_INT_P (XEXP (rtl, 1)))
16713 {
16714 loc_result
16715 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16716 break;
16717 }
16718 /* FALLTHRU */
16719 do_default:
16720 default:
16721 if ((is_a <scalar_int_mode> (mode, &int_mode)
16722 && GET_MODE (rtl) == int_mode
16723 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16724 && dwarf_version >= 4)
16725 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16726 {
16727 /* Value expression. */
16728 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16729 if (loc_result)
16730 add_loc_descr (&loc_result,
16731 new_loc_descr (DW_OP_stack_value, 0, 0));
16732 }
16733 break;
16734 }
16735
16736 return loc_result;
16737 }
16738
16739 /* We need to figure out what section we should use as the base for the
16740 address ranges where a given location is valid.
16741 1. If this particular DECL has a section associated with it, use that.
16742 2. If this function has a section associated with it, use that.
16743 3. Otherwise, use the text section.
16744 XXX: If you split a variable across multiple sections, we won't notice. */
16745
16746 static const char *
16747 secname_for_decl (const_tree decl)
16748 {
16749 const char *secname;
16750
16751 if (VAR_OR_FUNCTION_DECL_P (decl)
16752 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16753 && DECL_SECTION_NAME (decl))
16754 secname = DECL_SECTION_NAME (decl);
16755 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16756 secname = DECL_SECTION_NAME (current_function_decl);
16757 else if (cfun && in_cold_section_p)
16758 secname = crtl->subsections.cold_section_label;
16759 else
16760 secname = text_section_label;
16761
16762 return secname;
16763 }
16764
16765 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16766
16767 static bool
16768 decl_by_reference_p (tree decl)
16769 {
16770 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16771 || VAR_P (decl))
16772 && DECL_BY_REFERENCE (decl));
16773 }
16774
16775 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16776 for VARLOC. */
16777
16778 static dw_loc_descr_ref
16779 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16780 enum var_init_status initialized)
16781 {
16782 int have_address = 0;
16783 dw_loc_descr_ref descr;
16784 machine_mode mode;
16785
16786 if (want_address != 2)
16787 {
16788 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16789 /* Single part. */
16790 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16791 {
16792 varloc = PAT_VAR_LOCATION_LOC (varloc);
16793 if (GET_CODE (varloc) == EXPR_LIST)
16794 varloc = XEXP (varloc, 0);
16795 mode = GET_MODE (varloc);
16796 if (MEM_P (varloc))
16797 {
16798 rtx addr = XEXP (varloc, 0);
16799 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16800 mode, initialized);
16801 if (descr)
16802 have_address = 1;
16803 else
16804 {
16805 rtx x = avoid_constant_pool_reference (varloc);
16806 if (x != varloc)
16807 descr = mem_loc_descriptor (x, mode, VOIDmode,
16808 initialized);
16809 }
16810 }
16811 else
16812 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16813 }
16814 else
16815 return 0;
16816 }
16817 else
16818 {
16819 if (GET_CODE (varloc) == VAR_LOCATION)
16820 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16821 else
16822 mode = DECL_MODE (loc);
16823 descr = loc_descriptor (varloc, mode, initialized);
16824 have_address = 1;
16825 }
16826
16827 if (!descr)
16828 return 0;
16829
16830 if (want_address == 2 && !have_address
16831 && (dwarf_version >= 4 || !dwarf_strict))
16832 {
16833 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16834 {
16835 expansion_failed (loc, NULL_RTX,
16836 "DWARF address size mismatch");
16837 return 0;
16838 }
16839 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16840 have_address = 1;
16841 }
16842 /* Show if we can't fill the request for an address. */
16843 if (want_address && !have_address)
16844 {
16845 expansion_failed (loc, NULL_RTX,
16846 "Want address and only have value");
16847 return 0;
16848 }
16849
16850 /* If we've got an address and don't want one, dereference. */
16851 if (!want_address && have_address)
16852 {
16853 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16854 enum dwarf_location_atom op;
16855
16856 if (size > DWARF2_ADDR_SIZE || size == -1)
16857 {
16858 expansion_failed (loc, NULL_RTX,
16859 "DWARF address size mismatch");
16860 return 0;
16861 }
16862 else if (size == DWARF2_ADDR_SIZE)
16863 op = DW_OP_deref;
16864 else
16865 op = DW_OP_deref_size;
16866
16867 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16868 }
16869
16870 return descr;
16871 }
16872
16873 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16874 if it is not possible. */
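/* DW_OP_piece is used when the piece is a whole number of bytes starting at
bit offset zero; otherwise DW_OP_bit_piece is emitted, which needs
dwarf_version >= 3 or !dwarf_strict.  */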
16875
16876 static dw_loc_descr_ref
16877 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16878 {
16879 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16880 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16881 else if (dwarf_version >= 3 || !dwarf_strict)
16882 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16883 else
16884 return NULL;
16885 }
16886
16887 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16888 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
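/* LOC is an EXPR_LIST of piece-sized sub-locations (see decl_piece_bitsize);
pieces for which no location is known are represented by empty DWARF
expressions followed by DW_OP_*piece, meaning those bits are optimized
out.  */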
16889
16890 static dw_loc_descr_ref
16891 dw_sra_loc_expr (tree decl, rtx loc)
16892 {
16893 rtx p;
16894 unsigned HOST_WIDE_INT padsize = 0;
16895 dw_loc_descr_ref descr, *descr_tail;
16896 unsigned HOST_WIDE_INT decl_size;
16897 rtx varloc;
16898 enum var_init_status initialized;
16899
16900 if (DECL_SIZE (decl) == NULL
16901 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16902 return NULL;
16903
16904 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16905 descr = NULL;
16906 descr_tail = &descr;
16907
16908 for (p = loc; p; p = XEXP (p, 1))
16909 {
16910 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16911 rtx loc_note = *decl_piece_varloc_ptr (p);
16912 dw_loc_descr_ref cur_descr;
16913 dw_loc_descr_ref *tail, last = NULL;
16914 unsigned HOST_WIDE_INT opsize = 0;
16915
16916 if (loc_note == NULL_RTX
16917 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16918 {
16919 padsize += bitsize;
16920 continue;
16921 }
16922 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16923 varloc = NOTE_VAR_LOCATION (loc_note);
16924 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16925 if (cur_descr == NULL)
16926 {
16927 padsize += bitsize;
16928 continue;
16929 }
16930
16931 /* Check that cur_descr either doesn't use
16932 DW_OP_*piece operations, or their sum is equal
16933 to bitsize. Otherwise we can't embed it. */
16934 for (tail = &cur_descr; *tail != NULL;
16935 tail = &(*tail)->dw_loc_next)
16936 if ((*tail)->dw_loc_opc == DW_OP_piece)
16937 {
16938 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16939 * BITS_PER_UNIT;
16940 last = *tail;
16941 }
16942 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16943 {
16944 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16945 last = *tail;
16946 }
16947
16948 if (last != NULL && opsize != bitsize)
16949 {
16950 padsize += bitsize;
16951 /* Discard the current piece of the descriptor and release any
16952 addr_table entries it uses. */
16953 remove_loc_list_addr_table_entries (cur_descr);
16954 continue;
16955 }
16956
16957 /* If there is a hole, add DW_OP_*piece after empty DWARF
16958 expression, which means that those bits are optimized out. */
16959 if (padsize)
16960 {
16961 if (padsize > decl_size)
16962 {
16963 remove_loc_list_addr_table_entries (cur_descr);
16964 goto discard_descr;
16965 }
16966 decl_size -= padsize;
16967 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16968 if (*descr_tail == NULL)
16969 {
16970 remove_loc_list_addr_table_entries (cur_descr);
16971 goto discard_descr;
16972 }
16973 descr_tail = &(*descr_tail)->dw_loc_next;
16974 padsize = 0;
16975 }
16976 *descr_tail = cur_descr;
16977 descr_tail = tail;
16978 if (bitsize > decl_size)
16979 goto discard_descr;
16980 decl_size -= bitsize;
16981 if (last == NULL)
16982 {
16983 HOST_WIDE_INT offset = 0;
16984 if (GET_CODE (varloc) == VAR_LOCATION
16985 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16986 {
16987 varloc = PAT_VAR_LOCATION_LOC (varloc);
16988 if (GET_CODE (varloc) == EXPR_LIST)
16989 varloc = XEXP (varloc, 0);
16990 }
16991 do
16992 {
16993 if (GET_CODE (varloc) == CONST
16994 || GET_CODE (varloc) == SIGN_EXTEND
16995 || GET_CODE (varloc) == ZERO_EXTEND)
16996 varloc = XEXP (varloc, 0);
16997 else if (GET_CODE (varloc) == SUBREG)
16998 varloc = SUBREG_REG (varloc);
16999 else
17000 break;
17001 }
17002 while (1);
17003 /* The DW_OP_bit_piece offset should be zero for register
17004 or implicit location descriptions and empty location
17005 descriptions, but for memory addresses it needs big endian
17006 adjustment. */
17007 if (MEM_P (varloc))
17008 {
17009 unsigned HOST_WIDE_INT memsize;
17010 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17011 goto discard_descr;
17012 memsize *= BITS_PER_UNIT;
17013 if (memsize != bitsize)
17014 {
17015 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17016 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17017 goto discard_descr;
17018 if (memsize < bitsize)
17019 goto discard_descr;
17020 if (BITS_BIG_ENDIAN)
17021 offset = memsize - bitsize;
17022 }
17023 }
17024
17025 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17026 if (*descr_tail == NULL)
17027 goto discard_descr;
17028 descr_tail = &(*descr_tail)->dw_loc_next;
17029 }
17030 }
17031
17032 /* If there were any non-empty expressions, add padding till the end of
17033 the decl. */
17034 if (descr != NULL && decl_size != 0)
17035 {
17036 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17037 if (*descr_tail == NULL)
17038 goto discard_descr;
17039 }
17040 return descr;
17041
17042 discard_descr:
17043 /* Discard the descriptor and release any addr_table entries it uses. */
17044 remove_loc_list_addr_table_entries (descr);
17045 return NULL;
17046 }
17047
17048 /* Return the dwarf representation of the location list LOC_LIST of
17049 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17050 function. */
17051
17052 static dw_loc_list_ref
17053 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17054 {
17055 const char *endname, *secname;
17056 var_loc_view endview;
17057 rtx varloc;
17058 enum var_init_status initialized;
17059 struct var_loc_node *node;
17060 dw_loc_descr_ref descr;
17061 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17062 dw_loc_list_ref list = NULL;
17063 dw_loc_list_ref *listp = &list;
17064
17065 /* Now that we know what section we are using for a base,
17066 actually construct the list of locations.
17067 The first location information is what is passed to the
17068 function that creates the location list, and the remaining
17069 locations just get added on to that list.
17070 Note that we only know the start address for a location
17071 (i.e. location changes), so to build the range, we use
17072 the range [current location start, next location start].
17073 This means we have to special case the last node, and generate
17074 a range of [last location start, end of function label]. */
17075
17076 if (cfun && crtl->has_bb_partition)
17077 {
17078 bool save_in_cold_section_p = in_cold_section_p;
17079 in_cold_section_p = first_function_block_is_cold;
17080 if (loc_list->last_before_switch == NULL)
17081 in_cold_section_p = !in_cold_section_p;
17082 secname = secname_for_decl (decl);
17083 in_cold_section_p = save_in_cold_section_p;
17084 }
17085 else
17086 secname = secname_for_decl (decl);
17087
17088 for (node = loc_list->first; node; node = node->next)
17089 {
17090 bool range_across_switch = false;
17091 if (GET_CODE (node->loc) == EXPR_LIST
17092 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17093 {
17094 if (GET_CODE (node->loc) == EXPR_LIST)
17095 {
17096 descr = NULL;
17097 /* This requires DW_OP_{,bit_}piece, which is not usable
17098 inside DWARF expressions. */
17099 if (want_address == 2)
17100 descr = dw_sra_loc_expr (decl, node->loc);
17101 }
17102 else
17103 {
17104 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17105 varloc = NOTE_VAR_LOCATION (node->loc);
17106 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17107 }
17108 if (descr)
17109 {
17110 /* If section switch happens in between node->label
17111 and node->next->label (or end of function) and
17112 we can't emit it as a single entry list,
17113 emit two ranges, first one ending at the end
17114 of first partition and second one starting at the
17115 beginning of second partition. */
17116 if (node == loc_list->last_before_switch
17117 && (node != loc_list->first || loc_list->first->next)
17118 && current_function_decl)
17119 {
17120 endname = cfun->fde->dw_fde_end;
17121 endview = 0;
17122 range_across_switch = true;
17123 }
17124 /* The variable has a location between NODE->LABEL and
17125 NODE->NEXT->LABEL. */
17126 else if (node->next)
17127 endname = node->next->label, endview = node->next->view;
17128 /* If the variable has a location at the last label
17129 it keeps its location until the end of function. */
17130 else if (!current_function_decl)
17131 endname = text_end_label, endview = 0;
17132 else
17133 {
17134 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17135 current_function_funcdef_no);
17136 endname = ggc_strdup (label_id);
17137 endview = 0;
17138 }
17139
17140 *listp = new_loc_list (descr, node->label, node->view,
17141 endname, endview, secname);
17142 if (TREE_CODE (decl) == PARM_DECL
17143 && node == loc_list->first
17144 && NOTE_P (node->loc)
17145 && strcmp (node->label, endname) == 0)
17146 (*listp)->force = true;
17147 listp = &(*listp)->dw_loc_next;
17148 }
17149 }
17150
17151 if (cfun
17152 && crtl->has_bb_partition
17153 && node == loc_list->last_before_switch)
17154 {
17155 bool save_in_cold_section_p = in_cold_section_p;
17156 in_cold_section_p = !first_function_block_is_cold;
17157 secname = secname_for_decl (decl);
17158 in_cold_section_p = save_in_cold_section_p;
17159 }
17160
17161 if (range_across_switch)
17162 {
17163 if (GET_CODE (node->loc) == EXPR_LIST)
17164 descr = dw_sra_loc_expr (decl, node->loc);
17165 else
17166 {
17167 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17168 varloc = NOTE_VAR_LOCATION (node->loc);
17169 descr = dw_loc_list_1 (decl, varloc, want_address,
17170 initialized);
17171 }
17172 gcc_assert (descr);
17173 /* The variable has a location between NODE->LABEL and
17174 NODE->NEXT->LABEL. */
17175 if (node->next)
17176 endname = node->next->label, endview = node->next->view;
17177 else
17178 endname = cfun->fde->dw_fde_second_end, endview = 0;
17179 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17180 endname, endview, secname);
17181 listp = &(*listp)->dw_loc_next;
17182 }
17183 }
17184
17185 /* Try to avoid the overhead of a location list by emitting a location
17186 expression instead, but only if we didn't have more than one
17187 location entry in the first place. If some entries were not
17188 representable, we don't want to pretend that the single entry that was
17189 representable applies to the entire scope in which the variable is
17190 available. */
17191 if (list && loc_list->first->next)
17192 gen_llsym (list);
17193 else
17194 maybe_gen_llsym (list);
17195
17196 return list;
17197 }
17198
17199 /* Return true if the loc_list has only a single element and thus can be
17200 represented as a location description. */
17201
17202 static bool
17203 single_element_loc_list_p (dw_loc_list_ref list)
17204 {
17205 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17206 return !list->ll_symbol;
17207 }
17208
17209 /* Duplicate a single element of location list. */
17210
17211 static inline dw_loc_descr_ref
17212 copy_loc_descr (dw_loc_descr_ref ref)
17213 {
17214 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17215 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17216 return copy;
17217 }
17218
17219 /* To each location in list LIST append loc descr REF. */
17220
17221 static void
17222 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17223 {
17224 dw_loc_descr_ref copy;
17225 add_loc_descr (&list->expr, ref);
17226 list = list->dw_loc_next;
17227 while (list)
17228 {
17229 copy = copy_loc_descr (ref);
17230 add_loc_descr (&list->expr, copy);
17231 while (copy->dw_loc_next)
17232 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17233 list = list->dw_loc_next;
17234 }
17235 }
17236
17237 /* To each location in list LIST prepend loc descr REF. */
17238
17239 static void
17240 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17241 {
17242 dw_loc_descr_ref copy;
17243 dw_loc_descr_ref ref_end = list->expr;
17244 add_loc_descr (&ref, list->expr);
17245 list->expr = ref;
17246 list = list->dw_loc_next;
17247 while (list)
17248 {
17249 dw_loc_descr_ref end = list->expr;
17250 list->expr = copy = copy_loc_descr (ref);
17251 while (copy->dw_loc_next != ref_end)
17252 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17253 copy->dw_loc_next = end;
17254 list = list->dw_loc_next;
17255 }
17256 }
17257
17258 /* Given two lists RET and LIST
17259 produce the location list that results from adding the expression in LIST
17260 to the expression in RET at each position in the program.
17261 Might be destructive on both RET and LIST.
17262
17263 TODO: We handle only simple cases of RET or LIST having at most one
17264 element. The general case would involve sorting the lists in program order
17265 and merging them, which will need some additional work.
17266 Adding that will improve the quality of debug info, especially for SRA-ed
17267 structures. */
17268
17269 static void
17270 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17271 {
17272 if (!list)
17273 return;
17274 if (!*ret)
17275 {
17276 *ret = list;
17277 return;
17278 }
17279 if (!list->dw_loc_next)
17280 {
17281 add_loc_descr_to_each (*ret, list->expr);
17282 return;
17283 }
17284 if (!(*ret)->dw_loc_next)
17285 {
17286 prepend_loc_descr_to_each (list, (*ret)->expr);
17287 *ret = list;
17288 return;
17289 }
17290 expansion_failed (NULL_TREE, NULL_RTX,
17291 "Don't know how to merge two non-trivial"
17292 " location lists.\n");
17293 *ret = NULL;
17294 return;
17295 }
17296
17297 /* LOC is a constant expression. Try our luck: look it up in the constant
17298 pool and return a loc_descr for its address. */
17299
17300 static dw_loc_descr_ref
17301 cst_pool_loc_descr (tree loc)
17302 {
17303 /* Get an RTL for this, if something has been emitted. */
17304 rtx rtl = lookup_constant_def (loc);
17305
17306 if (!rtl || !MEM_P (rtl))
17307 {
17308 gcc_assert (!rtl);
17309 return 0;
17310 }
17311 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17312
17313 /* TODO: We might get more coverage if we were actually delaying expansion
17314 of all expressions until the end of compilation, when constant pools are
17315 fully populated. */
17316 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17317 {
17318 expansion_failed (loc, NULL_RTX,
17319 "CST value in constant pool but not marked.");
17320 return 0;
17321 }
17322 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17323 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17324 }
17325
17326 /* Return dw_loc_list representing address of addr_expr LOC
17327 by looking for inner INDIRECT_REF expression and turning
17328 it into simple arithmetic.
17329
17330 See loc_list_from_tree for the meaning of CONTEXT. */
17331
17332 static dw_loc_list_ref
17333 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17334 loc_descr_context *context)
17335 {
17336 tree obj, offset;
17337 poly_int64 bitsize, bitpos, bytepos;
17338 machine_mode mode;
17339 int unsignedp, reversep, volatilep = 0;
17340 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17341
17342 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17343 &bitsize, &bitpos, &offset, &mode,
17344 &unsignedp, &reversep, &volatilep);
17345 STRIP_NOPS (obj);
17346 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17347 {
17348 expansion_failed (loc, NULL_RTX, "bitfield access");
17349 return 0;
17350 }
17351 if (!INDIRECT_REF_P (obj))
17352 {
17353 expansion_failed (obj,
17354 NULL_RTX, "no indirect ref in inner reference");
17355 return 0;
17356 }
17357 if (!offset && known_eq (bitpos, 0))
17358 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17359 context);
17360 else if (toplev
17361 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17362 && (dwarf_version >= 4 || !dwarf_strict))
17363 {
17364 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17365 if (!list_ret)
17366 return 0;
17367 if (offset)
17368 {
17369 /* Variable offset. */
17370 list_ret1 = loc_list_from_tree (offset, 0, context);
17371 if (list_ret1 == 0)
17372 return 0;
17373 add_loc_list (&list_ret, list_ret1);
17374 if (!list_ret)
17375 return 0;
17376 add_loc_descr_to_each (list_ret,
17377 new_loc_descr (DW_OP_plus, 0, 0));
17378 }
17379 HOST_WIDE_INT value;
17380 if (bytepos.is_constant (&value) && value > 0)
17381 add_loc_descr_to_each (list_ret,
17382 new_loc_descr (DW_OP_plus_uconst, value, 0));
17383 else if (maybe_ne (bytepos, 0))
17384 loc_list_plus_const (list_ret, bytepos);
17385 add_loc_descr_to_each (list_ret,
17386 new_loc_descr (DW_OP_stack_value, 0, 0));
17387 }
17388 return list_ret;
17389 }
17390
17391 /* Set LOC to the next operation that is not a DW_OP_nop operation. In case
17392 all operations from LOC are nops, move to the last one. Insert into NOPS all
17393 operations that are skipped. */
17394
17395 static void
17396 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17397 hash_set<dw_loc_descr_ref> &nops)
17398 {
17399 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17400 {
17401 nops.add (loc);
17402 loc = loc->dw_loc_next;
17403 }
17404 }
17405
17406 /* Helper for loc_descr_without_nops: free the location description operation
17407 P. */
17408
17409 bool
17410 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17411 {
17412 ggc_free (loc);
17413 return true;
17414 }
17415
17416 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17417 finishes LOC. */
17418
17419 static void
17420 loc_descr_without_nops (dw_loc_descr_ref &loc)
17421 {
17422 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17423 return;
17424
17425 /* Set of all DW_OP_nop operations we remove. */
17426 hash_set<dw_loc_descr_ref> nops;
17427
17428 /* First, strip all prefix NOP operations in order to keep the head of the
17429 operations list. */
17430 loc_descr_to_next_no_nop (loc, nops);
17431
17432 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17433 {
17434 /* For control flow operations: strip "prefix" nops in destination
17435 labels. */
17436 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17437 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17438 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17439 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17440
17441 /* Do the same for the operations that follow, then move to the next
17442 iteration. */
17443 if (cur->dw_loc_next != NULL)
17444 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17445 cur = cur->dw_loc_next;
17446 }
17447
17448 nops.traverse<void *, free_loc_descr> (NULL);
17449 }
17450
17451
17452 struct dwarf_procedure_info;
17453
17454 /* Helper structure for location descriptions generation. */
17455 struct loc_descr_context
17456 {
17457 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17458 NULL_TREE if DW_OP_push_object_address is invalid for this location
17459 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17460 tree context_type;
17461 /* The ..._DECL node that should be translated as a
17462 DW_OP_push_object_address operation. */
17463 tree base_decl;
17464 /* Information about the DWARF procedure we are currently generating. NULL if
17465 we are not generating a DWARF procedure. */
17466 struct dwarf_procedure_info *dpi;
17467 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17468 by the consumer. Used for DW_TAG_generic_subrange attributes. */
17469 bool placeholder_arg;
17470 /* True if PLACEHOLDER_EXPR has been seen. */
17471 bool placeholder_seen;
17472 };
17473
17474 /* DWARF procedures generation
17475
17476 DWARF expressions (aka. location descriptions) are used to encode variable
17477 things such as sizes or offsets. Such computations can have redundant parts
17478 that can be factorized in order to reduce the size of the output debug
17479 information. This is the whole point of DWARF procedures.
17480
17481 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17482 already factorized into functions ("size functions") in order to handle very
17483 big and complex types. Such functions are quite simple: they have integral
17484 arguments, they return an integral result and their body contains only a
17485 return statement with arithmetic expressions. This is the only kind of
17486 function we are interested in translating into DWARF procedures here.
17487
17488 DWARF expressions and DWARF procedure are executed using a stack, so we have
17489 to define some calling convention for them to interact. Let's say that:
17490
17491 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17492 all arguments in reverse order (right-to-left) so that when the DWARF
17493 procedure execution starts, the first argument is the top of the stack.
17494
17495 - Then, when returning, the DWARF procedure must have consumed all arguments
17496 on the stack, must have pushed the result and touched nothing else.
17497
17498 - Each integral argument and the result are of integral types that can be
17499 held in a single stack slot.
17500
17501 - We call "frame offset" the number of stack slots that are "under DWARF
17502 procedure control": it includes the arguments slots, the temporaries and
17503 the result slot. Thus, it is equal to the number of arguments when the
17504 procedure execution starts and must be equal to one (the result) when it
17505 returns. */
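/* As an illustration of this convention (a sketch only; the exact opcodes
   GCC emits may differ), consider a hypothetical size function

     static unsigned long
     sz (unsigned long n)
     {
       return 4 * n + 8;
     }

   One DWARF procedure body respecting the convention is

     DW_OP_lit4 DW_OP_mul DW_OP_plus_uconst 8

   which starts with the single argument on top of the stack, consumes it
   and leaves only the result. A caller needing sz (3) would then emit

     DW_OP_lit3 DW_OP_call4 <reference to the procedure DIE>

   leaving 20 on top of the stack after the call.  */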
17506
17507 /* Helper structure used when generating operations for a DWARF procedure. */
17508 struct dwarf_procedure_info
17509 {
17510 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17511 currently translated. */
17512 tree fndecl;
17513 /* The number of arguments FNDECL takes. */
17514 unsigned args_count;
17515 };
17516
17517 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17518 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17519 equate it to this DIE. */
17520
17521 static dw_die_ref
17522 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17523 dw_die_ref parent_die)
17524 {
17525 dw_die_ref dwarf_proc_die;
17526
17527 if ((dwarf_version < 3 && dwarf_strict)
17528 || location == NULL)
17529 return NULL;
17530
17531 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17532 if (fndecl)
17533 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17534 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17535 return dwarf_proc_die;
17536 }
17537
17538 /* Return whether TYPE is a supported type as a DWARF procedure argument
17539 type or return type (we handle only scalar types and pointer types that
17540 aren't wider than the DWARF expression evaluation stack). */
17541
17542 static bool
17543 is_handled_procedure_type (tree type)
17544 {
17545 return ((INTEGRAL_TYPE_P (type)
17546 || TREE_CODE (type) == OFFSET_TYPE
17547 || TREE_CODE (type) == POINTER_TYPE)
17548 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17549 }
17550
17551 /* Helper for resolve_args_picking: do the same but stop when coming across
17552 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17553 offset *before* evaluating the corresponding operation. */
17554
17555 static bool
17556 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17557 struct dwarf_procedure_info *dpi,
17558 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17559 {
17560 /* The "frame_offset" identifier is already used to name a macro... */
17561 unsigned frame_offset_ = initial_frame_offset;
17562 dw_loc_descr_ref l;
17563
17564 for (l = loc; l != NULL;)
17565 {
17566 bool existed;
17567 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17568
17569 /* If we have already met this node, there is nothing to compute anymore. */
17570 if (existed)
17571 {
17572 /* Make sure that the stack size is consistent wherever the execution
17573 flow comes from. */
17574 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17575 break;
17576 }
17577 l_frame_offset = frame_offset_;
17578
17579 /* If needed, relocate the picking offset with respect to the frame
17580 offset. */
17581 if (l->frame_offset_rel)
17582 {
17583 unsigned HOST_WIDE_INT off;
17584 switch (l->dw_loc_opc)
17585 {
17586 case DW_OP_pick:
17587 off = l->dw_loc_oprnd1.v.val_unsigned;
17588 break;
17589 case DW_OP_dup:
17590 off = 0;
17591 break;
17592 case DW_OP_over:
17593 off = 1;
17594 break;
17595 default:
17596 gcc_unreachable ();
17597 }
17598 /* frame_offset_ is the size of the current stack frame, including
17599 incoming arguments. Besides, the arguments are pushed
17600 right-to-left. Thus, in order to access the Nth argument from
17601 this operation node, the picking has to skip temporaries *plus*
17602 one stack slot per argument (0 for the first one, 1 for the second
17603 one, etc.).
17604
17605 The targeted argument number (N) is already set as the operand,
17606 and the number of temporaries can be computed with:
17607 frame_offset_ - dpi->args_count */
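	  /* For instance, with two arguments and one temporary already on the
	     stack (frame_offset_ == 3), accessing argument N == 1 gives
	     off = 1 + 3 - 2 = 2, i.e. a DW_OP_pick 2 operation.  */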
17608 off += frame_offset_ - dpi->args_count;
17609
17610 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17611 if (off > 255)
17612 return false;
17613
17614 if (off == 0)
17615 {
17616 l->dw_loc_opc = DW_OP_dup;
17617 l->dw_loc_oprnd1.v.val_unsigned = 0;
17618 }
17619 else if (off == 1)
17620 {
17621 l->dw_loc_opc = DW_OP_over;
17622 l->dw_loc_oprnd1.v.val_unsigned = 0;
17623 }
17624 else
17625 {
17626 l->dw_loc_opc = DW_OP_pick;
17627 l->dw_loc_oprnd1.v.val_unsigned = off;
17628 }
17629 }
17630
17631 /* Update frame_offset according to the effect the current operation has
17632 on the stack. */
17633 switch (l->dw_loc_opc)
17634 {
17635 case DW_OP_deref:
17636 case DW_OP_swap:
17637 case DW_OP_rot:
17638 case DW_OP_abs:
17639 case DW_OP_neg:
17640 case DW_OP_not:
17641 case DW_OP_plus_uconst:
17642 case DW_OP_skip:
17643 case DW_OP_reg0:
17644 case DW_OP_reg1:
17645 case DW_OP_reg2:
17646 case DW_OP_reg3:
17647 case DW_OP_reg4:
17648 case DW_OP_reg5:
17649 case DW_OP_reg6:
17650 case DW_OP_reg7:
17651 case DW_OP_reg8:
17652 case DW_OP_reg9:
17653 case DW_OP_reg10:
17654 case DW_OP_reg11:
17655 case DW_OP_reg12:
17656 case DW_OP_reg13:
17657 case DW_OP_reg14:
17658 case DW_OP_reg15:
17659 case DW_OP_reg16:
17660 case DW_OP_reg17:
17661 case DW_OP_reg18:
17662 case DW_OP_reg19:
17663 case DW_OP_reg20:
17664 case DW_OP_reg21:
17665 case DW_OP_reg22:
17666 case DW_OP_reg23:
17667 case DW_OP_reg24:
17668 case DW_OP_reg25:
17669 case DW_OP_reg26:
17670 case DW_OP_reg27:
17671 case DW_OP_reg28:
17672 case DW_OP_reg29:
17673 case DW_OP_reg30:
17674 case DW_OP_reg31:
17675 case DW_OP_bregx:
17676 case DW_OP_piece:
17677 case DW_OP_deref_size:
17678 case DW_OP_nop:
17679 case DW_OP_bit_piece:
17680 case DW_OP_implicit_value:
17681 case DW_OP_stack_value:
17682 break;
17683
17684 case DW_OP_addr:
17685 case DW_OP_const1u:
17686 case DW_OP_const1s:
17687 case DW_OP_const2u:
17688 case DW_OP_const2s:
17689 case DW_OP_const4u:
17690 case DW_OP_const4s:
17691 case DW_OP_const8u:
17692 case DW_OP_const8s:
17693 case DW_OP_constu:
17694 case DW_OP_consts:
17695 case DW_OP_dup:
17696 case DW_OP_over:
17697 case DW_OP_pick:
17698 case DW_OP_lit0:
17699 case DW_OP_lit1:
17700 case DW_OP_lit2:
17701 case DW_OP_lit3:
17702 case DW_OP_lit4:
17703 case DW_OP_lit5:
17704 case DW_OP_lit6:
17705 case DW_OP_lit7:
17706 case DW_OP_lit8:
17707 case DW_OP_lit9:
17708 case DW_OP_lit10:
17709 case DW_OP_lit11:
17710 case DW_OP_lit12:
17711 case DW_OP_lit13:
17712 case DW_OP_lit14:
17713 case DW_OP_lit15:
17714 case DW_OP_lit16:
17715 case DW_OP_lit17:
17716 case DW_OP_lit18:
17717 case DW_OP_lit19:
17718 case DW_OP_lit20:
17719 case DW_OP_lit21:
17720 case DW_OP_lit22:
17721 case DW_OP_lit23:
17722 case DW_OP_lit24:
17723 case DW_OP_lit25:
17724 case DW_OP_lit26:
17725 case DW_OP_lit27:
17726 case DW_OP_lit28:
17727 case DW_OP_lit29:
17728 case DW_OP_lit30:
17729 case DW_OP_lit31:
17730 case DW_OP_breg0:
17731 case DW_OP_breg1:
17732 case DW_OP_breg2:
17733 case DW_OP_breg3:
17734 case DW_OP_breg4:
17735 case DW_OP_breg5:
17736 case DW_OP_breg6:
17737 case DW_OP_breg7:
17738 case DW_OP_breg8:
17739 case DW_OP_breg9:
17740 case DW_OP_breg10:
17741 case DW_OP_breg11:
17742 case DW_OP_breg12:
17743 case DW_OP_breg13:
17744 case DW_OP_breg14:
17745 case DW_OP_breg15:
17746 case DW_OP_breg16:
17747 case DW_OP_breg17:
17748 case DW_OP_breg18:
17749 case DW_OP_breg19:
17750 case DW_OP_breg20:
17751 case DW_OP_breg21:
17752 case DW_OP_breg22:
17753 case DW_OP_breg23:
17754 case DW_OP_breg24:
17755 case DW_OP_breg25:
17756 case DW_OP_breg26:
17757 case DW_OP_breg27:
17758 case DW_OP_breg28:
17759 case DW_OP_breg29:
17760 case DW_OP_breg30:
17761 case DW_OP_breg31:
17762 case DW_OP_fbreg:
17763 case DW_OP_push_object_address:
17764 case DW_OP_call_frame_cfa:
17765 case DW_OP_GNU_variable_value:
17766 ++frame_offset_;
17767 break;
17768
17769 case DW_OP_drop:
17770 case DW_OP_xderef:
17771 case DW_OP_and:
17772 case DW_OP_div:
17773 case DW_OP_minus:
17774 case DW_OP_mod:
17775 case DW_OP_mul:
17776 case DW_OP_or:
17777 case DW_OP_plus:
17778 case DW_OP_shl:
17779 case DW_OP_shr:
17780 case DW_OP_shra:
17781 case DW_OP_xor:
17782 case DW_OP_bra:
17783 case DW_OP_eq:
17784 case DW_OP_ge:
17785 case DW_OP_gt:
17786 case DW_OP_le:
17787 case DW_OP_lt:
17788 case DW_OP_ne:
17789 case DW_OP_regx:
17790 case DW_OP_xderef_size:
17791 --frame_offset_;
17792 break;
17793
17794 case DW_OP_call2:
17795 case DW_OP_call4:
17796 case DW_OP_call_ref:
17797 {
17798 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17799 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17800
17801 if (stack_usage == NULL)
17802 return false;
17803 frame_offset_ += *stack_usage;
17804 break;
17805 }
17806
17807 case DW_OP_implicit_pointer:
17808 case DW_OP_entry_value:
17809 case DW_OP_const_type:
17810 case DW_OP_regval_type:
17811 case DW_OP_deref_type:
17812 case DW_OP_convert:
17813 case DW_OP_reinterpret:
17814 case DW_OP_form_tls_address:
17815 case DW_OP_GNU_push_tls_address:
17816 case DW_OP_GNU_uninit:
17817 case DW_OP_GNU_encoded_addr:
17818 case DW_OP_GNU_implicit_pointer:
17819 case DW_OP_GNU_entry_value:
17820 case DW_OP_GNU_const_type:
17821 case DW_OP_GNU_regval_type:
17822 case DW_OP_GNU_deref_type:
17823 case DW_OP_GNU_convert:
17824 case DW_OP_GNU_reinterpret:
17825 case DW_OP_GNU_parameter_ref:
17826 /* loc_list_from_tree will probably not output these operations for
17827 size functions, so assume they will not appear here. */
17828 /* Fall through... */
17829
17830 default:
17831 gcc_unreachable ();
17832 }
17833
17834 /* Now, follow the control flow (except subroutine calls). */
17835 switch (l->dw_loc_opc)
17836 {
17837 case DW_OP_bra:
17838 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17839 frame_offsets))
17840 return false;
17841 /* Fall through. */
17842
17843 case DW_OP_skip:
17844 l = l->dw_loc_oprnd1.v.val_loc;
17845 break;
17846
17847 case DW_OP_stack_value:
17848 return true;
17849
17850 default:
17851 l = l->dw_loc_next;
17852 break;
17853 }
17854 }
17855
17856 return true;
17857 }
17858
17859 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17860 operations) in order to resolve the operand of DW_OP_pick operations that
17861 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17862 offset *before* LOC is executed. Return whether all relocations were
17863 successful. */
17864
17865 static bool
17866 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17867 struct dwarf_procedure_info *dpi)
17868 {
17869 /* Associate to all visited operations the frame offset *before* evaluating
17870 this operation. */
17871 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17872
17873 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17874 frame_offsets);
17875 }
17876
17877 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17878 Return NULL if it is not possible. */
17879
17880 static dw_die_ref
17881 function_to_dwarf_procedure (tree fndecl)
17882 {
17883 struct loc_descr_context ctx;
17884 struct dwarf_procedure_info dpi;
17885 dw_die_ref dwarf_proc_die;
17886 tree tree_body = DECL_SAVED_TREE (fndecl);
17887 dw_loc_descr_ref loc_body, epilogue;
17888
17889 tree cursor;
17890 unsigned i;
17891
17892 /* Do not generate multiple DWARF procedures for the same function
17893 declaration. */
17894 dwarf_proc_die = lookup_decl_die (fndecl);
17895 if (dwarf_proc_die != NULL)
17896 return dwarf_proc_die;
17897
17898 /* DWARF procedures are available starting with the DWARFv3 standard. */
17899 if (dwarf_version < 3 && dwarf_strict)
17900 return NULL;
17901
17902 /* We handle only functions for which we still have a body, that return a
17903 supported type and that take arguments with supported types. Note that
17904 there is no point translating functions that return nothing. */
17905 if (tree_body == NULL_TREE
17906 || DECL_RESULT (fndecl) == NULL_TREE
17907 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17908 return NULL;
17909
17910 for (cursor = DECL_ARGUMENTS (fndecl);
17911 cursor != NULL_TREE;
17912 cursor = TREE_CHAIN (cursor))
17913 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17914 return NULL;
17915
17916 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17917 if (TREE_CODE (tree_body) != RETURN_EXPR)
17918 return NULL;
17919 tree_body = TREE_OPERAND (tree_body, 0);
17920 if (TREE_CODE (tree_body) != MODIFY_EXPR
17921 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17922 return NULL;
17923 tree_body = TREE_OPERAND (tree_body, 1);
17924
17925 /* Try to translate the body expression itself. Note that this will probably
17926 cause an infinite recursion if its call graph has a cycle. This is very
17927 unlikely for size functions, however, so don't bother with such things at
17928 the moment. */
17929 ctx.context_type = NULL_TREE;
17930 ctx.base_decl = NULL_TREE;
17931 ctx.dpi = &dpi;
17932 ctx.placeholder_arg = false;
17933 ctx.placeholder_seen = false;
17934 dpi.fndecl = fndecl;
17935 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17936 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17937 if (!loc_body)
17938 return NULL;
17939
17940 /* After evaluating all operands in "loc_body", we should still have on the
17941 stack all arguments plus the desired function result (top of the stack).
17942 Generate code in order to keep only the result in our stack frame. */
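  /* For instance, with dpi.args_count == 2 the epilogue built below is
     DW_OP_swap DW_OP_drop DW_OP_swap DW_OP_drop: each swap/drop pair pops
     the slot just under the result, so only the result remains on top.  */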
17943 epilogue = NULL;
17944 for (i = 0; i < dpi.args_count; ++i)
17945 {
17946 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17947 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17948 op_couple->dw_loc_next->dw_loc_next = epilogue;
17949 epilogue = op_couple;
17950 }
17951 add_loc_descr (&loc_body, epilogue);
17952 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17953 return NULL;
17954
17955 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17956 because they are considered useful. Now that there is an epilogue, they
17957 are not useful anymore, so give it another try. */
17958 loc_descr_without_nops (loc_body);
17959
17960 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17961 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
17962 though, given that size functions do not come from source, so they should
17963 not have a dedicated DW_TAG_subprogram DIE. */
17964 dwarf_proc_die
17965 = new_dwarf_proc_die (loc_body, fndecl,
17966 get_context_die (DECL_CONTEXT (fndecl)));
17967
17968 /* The called DWARF procedure consumes one stack slot per argument and
17969 returns one stack slot. */
17970 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17971
17972 return dwarf_proc_die;
17973 }
17974
17975
17976 /* Generate Dwarf location list representing LOC.
17977 If WANT_ADDRESS is false, an expression computing the value of LOC will
17978 be returned. If WANT_ADDRESS is 1, an expression computing the address
17979 of LOC will be returned. If WANT_ADDRESS is 2, an expression computing
17980 an address usable in a location description will be returned
17981 (i.e. DW_OP_reg can be used to refer to register values).
17982
17983 CONTEXT provides information to customize the location descriptions
17984 generation. Its context_type field specifies what type is implicitly
17985 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17986 will not be generated.
17987
17988 Its DPI field determines whether we are generating a DWARF expression for a
17989 DWARF procedure, so PARM_DECL references are processed specifically.
17990
17991 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
17992 and dpi fields were null. */
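/* As a minimal sketch of this contract (assuming a variable V whose home is
   a memory slot at frame offset 8): WANT_ADDRESS == 1 would typically yield
   a list whose single expression is DW_OP_fbreg 8, while WANT_ADDRESS == 0
   would additionally dereference it (DW_OP_deref or DW_OP_deref_size), as
   done at the end of this function.  */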
17993
17994 static dw_loc_list_ref
17995 loc_list_from_tree_1 (tree loc, int want_address,
17996 struct loc_descr_context *context)
17997 {
17998 dw_loc_descr_ref ret = NULL, ret1 = NULL;
17999 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18000 int have_address = 0;
18001 enum dwarf_location_atom op;
18002
18003 /* ??? Most of the time we do not take proper care of sign/zero
18004 extending the values. Hopefully this won't be a real
18005 problem... */
18006
18007 if (context != NULL
18008 && context->base_decl == loc
18009 && want_address == 0)
18010 {
18011 if (dwarf_version >= 3 || !dwarf_strict)
18012 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18013 NULL, 0, NULL, 0, NULL);
18014 else
18015 return NULL;
18016 }
18017
18018 switch (TREE_CODE (loc))
18019 {
18020 case ERROR_MARK:
18021 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18022 return 0;
18023
18024 case PLACEHOLDER_EXPR:
18025 /* This case involves extracting fields from an object to determine the
18026 position of other fields. It is supposed to appear only as the first
18027 operand of COMPONENT_REF nodes and to reference precisely the type
18028 that the context allows. */
18029 if (context != NULL
18030 && TREE_TYPE (loc) == context->context_type
18031 && want_address >= 1)
18032 {
18033 if (dwarf_version >= 3 || !dwarf_strict)
18034 {
18035 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18036 have_address = 1;
18037 break;
18038 }
18039 else
18040 return NULL;
18041 }
18042 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18043 the single argument passed by the consumer. */
18044 else if (context != NULL
18045 && context->placeholder_arg
18046 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18047 && want_address == 0)
18048 {
18049 ret = new_loc_descr (DW_OP_pick, 0, 0);
18050 ret->frame_offset_rel = 1;
18051 context->placeholder_seen = true;
18052 break;
18053 }
18054 else
18055 expansion_failed (loc, NULL_RTX,
18056 "PLACEHOLDER_EXPR for an unexpected type");
18057 break;
18058
18059 case CALL_EXPR:
18060 {
18061 const int nargs = call_expr_nargs (loc);
18062 tree callee = get_callee_fndecl (loc);
18063 int i;
18064 dw_die_ref dwarf_proc;
18065
18066 if (callee == NULL_TREE)
18067 goto call_expansion_failed;
18068
18069 /* We handle only functions that return an integer. */
18070 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18071 goto call_expansion_failed;
18072
18073 dwarf_proc = function_to_dwarf_procedure (callee);
18074 if (dwarf_proc == NULL)
18075 goto call_expansion_failed;
18076
18077 /* Evaluate arguments right-to-left so that the first argument will
18078 be the top-most one on the stack. */
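	/* This matches the calling convention documented above
	   struct dwarf_procedure_info: the DW_OP_call4 emitted below thus
	   finds the first argument on top of the stack.  */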
18079 for (i = nargs - 1; i >= 0; --i)
18080 {
18081 dw_loc_descr_ref loc_descr
18082 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18083 context);
18084
18085 if (loc_descr == NULL)
18086 goto call_expansion_failed;
18087
18088 add_loc_descr (&ret, loc_descr);
18089 }
18090
18091 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18092 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18093 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18094 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18095 add_loc_descr (&ret, ret1);
18096 break;
18097
18098 call_expansion_failed:
18099 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18100 /* There are no opcodes for these operations. */
18101 return 0;
18102 }
18103
18104 case PREINCREMENT_EXPR:
18105 case PREDECREMENT_EXPR:
18106 case POSTINCREMENT_EXPR:
18107 case POSTDECREMENT_EXPR:
18108 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18109 /* There are no opcodes for these operations. */
18110 return 0;
18111
18112 case ADDR_EXPR:
18113 /* If we already want an address, see if there is INDIRECT_REF inside
18114 e.g. for &this->field. */
18115 if (want_address)
18116 {
18117 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18118 (loc, want_address == 2, context);
18119 if (list_ret)
18120 have_address = 1;
18121 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18122 && (ret = cst_pool_loc_descr (loc)))
18123 have_address = 1;
18124 }
18125 /* Otherwise, process the argument and look for the address. */
18126 if (!list_ret && !ret)
18127 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18128 else
18129 {
18130 if (want_address)
18131 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18132 return NULL;
18133 }
18134 break;
18135
18136 case VAR_DECL:
18137 if (DECL_THREAD_LOCAL_P (loc))
18138 {
18139 rtx rtl;
18140 enum dwarf_location_atom tls_op;
18141 enum dtprel_bool dtprel = dtprel_false;
18142
18143 if (targetm.have_tls)
18144 {
18145 /* If this is not defined, we have no way to emit the
18146 data. */
18147 if (!targetm.asm_out.output_dwarf_dtprel)
18148 return 0;
18149
18150 /* The way DW_OP_GNU_push_tls_address is specified, we
18151 can only look up addresses of objects in the current
18152 module. We used DW_OP_addr as first op, but that's
18153 wrong, because DW_OP_addr is relocated by the debug
18154 info consumer, while DW_OP_GNU_push_tls_address
18155 operand shouldn't be. */
18156 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18157 return 0;
18158 dtprel = dtprel_true;
18159 /* We check for DWARF 5 here because gdb did not implement
18160 DW_OP_form_tls_address until after 7.12. */
18161 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18162 : DW_OP_GNU_push_tls_address);
18163 }
18164 else
18165 {
18166 if (!targetm.emutls.debug_form_tls_address
18167 || !(dwarf_version >= 3 || !dwarf_strict))
18168 return 0;
18169 /* We stuffed the control variable into the DECL_VALUE_EXPR
18170 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18171 no longer appear in gimple code. We used the control
18172 variable specifically so that we could pick it up here. */
18173 loc = DECL_VALUE_EXPR (loc);
18174 tls_op = DW_OP_form_tls_address;
18175 }
18176
18177 rtl = rtl_for_decl_location (loc);
18178 if (rtl == NULL_RTX)
18179 return 0;
18180
18181 if (!MEM_P (rtl))
18182 return 0;
18183 rtl = XEXP (rtl, 0);
18184 if (! CONSTANT_P (rtl))
18185 return 0;
18186
18187 ret = new_addr_loc_descr (rtl, dtprel);
18188 ret1 = new_loc_descr (tls_op, 0, 0);
18189 add_loc_descr (&ret, ret1);
18190
18191 have_address = 1;
18192 break;
18193 }
18194 /* FALLTHRU */
18195
18196 case PARM_DECL:
18197 if (context != NULL && context->dpi != NULL
18198 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18199 {
18200 /* We are generating code for a DWARF procedure and we want to access
18201 one of its arguments: find the appropriate argument offset and let
18202 the resolve_args_picking pass compute the offset that complies
18203 with the stack frame size. */
18204 unsigned i = 0;
18205 tree cursor;
18206
18207 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18208 cursor != NULL_TREE && cursor != loc;
18209 cursor = TREE_CHAIN (cursor), ++i)
18210 ;
18211 /* If we are translating a DWARF procedure, all referenced parameters
18212 must belong to the current function. */
18213 gcc_assert (cursor != NULL_TREE);
18214
18215 ret = new_loc_descr (DW_OP_pick, i, 0);
18216 ret->frame_offset_rel = 1;
18217 break;
18218 }
18219 /* FALLTHRU */
18220
18221 case RESULT_DECL:
18222 if (DECL_HAS_VALUE_EXPR_P (loc))
18223 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18224 want_address, context);
18225 /* FALLTHRU */
18226
18227 case FUNCTION_DECL:
18228 {
18229 rtx rtl;
18230 var_loc_list *loc_list = lookup_decl_loc (loc);
18231
18232 if (loc_list && loc_list->first)
18233 {
18234 list_ret = dw_loc_list (loc_list, loc, want_address);
18235 have_address = want_address != 0;
18236 break;
18237 }
18238 rtl = rtl_for_decl_location (loc);
18239 if (rtl == NULL_RTX)
18240 {
18241 if (TREE_CODE (loc) != FUNCTION_DECL
18242 && early_dwarf
18243 && current_function_decl
18244 && want_address != 1
18245 && ! DECL_IGNORED_P (loc)
18246 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18247 || POINTER_TYPE_P (TREE_TYPE (loc)))
18248 && DECL_CONTEXT (loc) == current_function_decl
18249 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18250 <= DWARF2_ADDR_SIZE))
18251 {
18252 dw_die_ref ref = lookup_decl_die (loc);
18253 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18254 if (ref)
18255 {
18256 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18257 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18258 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18259 }
18260 else
18261 {
18262 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18263 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18264 }
18265 break;
18266 }
18267 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18268 return 0;
18269 }
18270 else if (CONST_INT_P (rtl))
18271 {
18272 HOST_WIDE_INT val = INTVAL (rtl);
18273 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18274 val &= GET_MODE_MASK (DECL_MODE (loc));
18275 ret = int_loc_descriptor (val);
18276 }
18277 else if (GET_CODE (rtl) == CONST_STRING)
18278 {
18279 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18280 return 0;
18281 }
18282 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18283 ret = new_addr_loc_descr (rtl, dtprel_false);
18284 else
18285 {
18286 machine_mode mode, mem_mode;
18287
18288 /* Certain constructs can only be represented at top-level. */
18289 if (want_address == 2)
18290 {
18291 ret = loc_descriptor (rtl, VOIDmode,
18292 VAR_INIT_STATUS_INITIALIZED);
18293 have_address = 1;
18294 }
18295 else
18296 {
18297 mode = GET_MODE (rtl);
18298 mem_mode = VOIDmode;
18299 if (MEM_P (rtl))
18300 {
18301 mem_mode = mode;
18302 mode = get_address_mode (rtl);
18303 rtl = XEXP (rtl, 0);
18304 have_address = 1;
18305 }
18306 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18307 VAR_INIT_STATUS_INITIALIZED);
18308 }
18309 if (!ret)
18310 expansion_failed (loc, rtl,
18311 "failed to produce loc descriptor for rtl");
18312 }
18313 }
18314 break;
18315
18316 case MEM_REF:
18317 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18318 {
18319 have_address = 1;
18320 goto do_plus;
18321 }
18322 /* Fallthru. */
18323 case INDIRECT_REF:
18324 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18325 have_address = 1;
18326 break;
18327
18328 case TARGET_MEM_REF:
18329 case SSA_NAME:
18330 case DEBUG_EXPR_DECL:
18331 return NULL;
18332
18333 case COMPOUND_EXPR:
18334 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18335 context);
18336
18337 CASE_CONVERT:
18338 case VIEW_CONVERT_EXPR:
18339 case SAVE_EXPR:
18340 case MODIFY_EXPR:
18341 case NON_LVALUE_EXPR:
18342 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18343 context);
18344
18345 case COMPONENT_REF:
18346 case BIT_FIELD_REF:
18347 case ARRAY_REF:
18348 case ARRAY_RANGE_REF:
18349 case REALPART_EXPR:
18350 case IMAGPART_EXPR:
18351 {
18352 tree obj, offset;
18353 poly_int64 bitsize, bitpos, bytepos;
18354 machine_mode mode;
18355 int unsignedp, reversep, volatilep = 0;
18356
18357 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18358 &unsignedp, &reversep, &volatilep);
18359
18360 gcc_assert (obj != loc);
18361
18362 list_ret = loc_list_from_tree_1 (obj,
18363 want_address == 2
18364 && known_eq (bitpos, 0)
18365 && !offset ? 2 : 1,
18366 context);
18367 /* TODO: We can extract the value of a small expression via shifting even
18368 for a nonzero bitpos. */
18369 if (list_ret == 0)
18370 return 0;
18371 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18372 || !multiple_p (bitsize, BITS_PER_UNIT))
18373 {
18374 expansion_failed (loc, NULL_RTX,
18375 "bitfield access");
18376 return 0;
18377 }
18378
18379 if (offset != NULL_TREE)
18380 {
18381 /* Variable offset. */
18382 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18383 if (list_ret1 == 0)
18384 return 0;
18385 add_loc_list (&list_ret, list_ret1);
18386 if (!list_ret)
18387 return 0;
18388 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18389 }
18390
18391 HOST_WIDE_INT value;
18392 if (bytepos.is_constant (&value) && value > 0)
18393 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18394 value, 0));
18395 else if (maybe_ne (bytepos, 0))
18396 loc_list_plus_const (list_ret, bytepos);
18397
18398 have_address = 1;
18399 break;
18400 }
18401
18402 case INTEGER_CST:
18403 if ((want_address || !tree_fits_shwi_p (loc))
18404 && (ret = cst_pool_loc_descr (loc)))
18405 have_address = 1;
18406 else if (want_address == 2
18407 && tree_fits_shwi_p (loc)
18408 && (ret = address_of_int_loc_descriptor
18409 (int_size_in_bytes (TREE_TYPE (loc)),
18410 tree_to_shwi (loc))))
18411 have_address = 1;
18412 else if (tree_fits_shwi_p (loc))
18413 ret = int_loc_descriptor (tree_to_shwi (loc));
18414 else if (tree_fits_uhwi_p (loc))
18415 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18416 else
18417 {
18418 expansion_failed (loc, NULL_RTX,
18419 "Integer operand is not host integer");
18420 return 0;
18421 }
18422 break;
18423
18424 case CONSTRUCTOR:
18425 case REAL_CST:
18426 case STRING_CST:
18427 case COMPLEX_CST:
18428 if ((ret = cst_pool_loc_descr (loc)))
18429 have_address = 1;
18430 else if (TREE_CODE (loc) == CONSTRUCTOR)
18431 {
18432 tree type = TREE_TYPE (loc);
18433 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18434 unsigned HOST_WIDE_INT offset = 0;
18435 unsigned HOST_WIDE_INT cnt;
18436 constructor_elt *ce;
18437
18438 if (TREE_CODE (type) == RECORD_TYPE)
18439 {
18440 /* This is very limited, but it's enough to output
18441 pointers to member functions, as long as the
18442 referenced function is defined in the current
18443 translation unit. */
18444 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18445 {
18446 tree val = ce->value;
18447
18448 tree field = ce->index;
18449
18450 if (val)
18451 STRIP_NOPS (val);
18452
18453 if (!field || DECL_BIT_FIELD (field))
18454 {
18455 expansion_failed (loc, NULL_RTX,
18456 "bitfield in record type constructor");
18457 size = offset = (unsigned HOST_WIDE_INT)-1;
18458 ret = NULL;
18459 break;
18460 }
18461
18462 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18463 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18464 gcc_assert (pos + fieldsize <= size);
18465 if (pos < offset)
18466 {
18467 expansion_failed (loc, NULL_RTX,
18468 "out-of-order fields in record constructor");
18469 size = offset = (unsigned HOST_WIDE_INT)-1;
18470 ret = NULL;
18471 break;
18472 }
18473 if (pos > offset)
18474 {
18475 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18476 add_loc_descr (&ret, ret1);
18477 offset = pos;
18478 }
18479 if (val && fieldsize != 0)
18480 {
18481 ret1 = loc_descriptor_from_tree (val, want_address, context);
18482 if (!ret1)
18483 {
18484 expansion_failed (loc, NULL_RTX,
18485 "unsupported expression in field");
18486 size = offset = (unsigned HOST_WIDE_INT)-1;
18487 ret = NULL;
18488 break;
18489 }
18490 add_loc_descr (&ret, ret1);
18491 }
18492 if (fieldsize)
18493 {
18494 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18495 add_loc_descr (&ret, ret1);
18496 offset = pos + fieldsize;
18497 }
18498 }
18499
18500 if (offset != size)
18501 {
18502 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18503 add_loc_descr (&ret, ret1);
18504 offset = size;
18505 }
18506
18507 have_address = !!want_address;
18508 }
18509 else
18510 expansion_failed (loc, NULL_RTX,
18511 "constructor of non-record type");
18512 }
18513 else
18514 /* We can construct small constants here using int_loc_descriptor. */
18515 expansion_failed (loc, NULL_RTX,
18516 "constructor or constant not in constant pool");
18517 break;
18518
18519 case TRUTH_AND_EXPR:
18520 case TRUTH_ANDIF_EXPR:
18521 case BIT_AND_EXPR:
18522 op = DW_OP_and;
18523 goto do_binop;
18524
18525 case TRUTH_XOR_EXPR:
18526 case BIT_XOR_EXPR:
18527 op = DW_OP_xor;
18528 goto do_binop;
18529
18530 case TRUTH_OR_EXPR:
18531 case TRUTH_ORIF_EXPR:
18532 case BIT_IOR_EXPR:
18533 op = DW_OP_or;
18534 goto do_binop;
18535
18536 case FLOOR_DIV_EXPR:
18537 case CEIL_DIV_EXPR:
18538 case ROUND_DIV_EXPR:
18539 case TRUNC_DIV_EXPR:
18540 case EXACT_DIV_EXPR:
18541 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18542 return 0;
18543 op = DW_OP_div;
18544 goto do_binop;
18545
18546 case MINUS_EXPR:
18547 op = DW_OP_minus;
18548 goto do_binop;
18549
18550 case FLOOR_MOD_EXPR:
18551 case CEIL_MOD_EXPR:
18552 case ROUND_MOD_EXPR:
18553 case TRUNC_MOD_EXPR:
18554 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18555 {
18556 op = DW_OP_mod;
18557 goto do_binop;
18558 }
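      /* DW_OP_mod is typically evaluated as an unsigned modulus by
	 consumers, so for signed operands compute op0 - (op0 / op1) * op1
	 instead; DW_OP_div performs a signed division.  The two DW_OP_over
	 operations below duplicate both operands before the division.  */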
18559 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18560 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18561 if (list_ret == 0 || list_ret1 == 0)
18562 return 0;
18563
18564 add_loc_list (&list_ret, list_ret1);
18565 if (list_ret == 0)
18566 return 0;
18567 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18568 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18569 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18570 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18571 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18572 break;
18573
18574 case MULT_EXPR:
18575 op = DW_OP_mul;
18576 goto do_binop;
18577
18578 case LSHIFT_EXPR:
18579 op = DW_OP_shl;
18580 goto do_binop;
18581
18582 case RSHIFT_EXPR:
18583 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18584 goto do_binop;
18585
18586 case POINTER_PLUS_EXPR:
18587 case PLUS_EXPR:
18588 do_plus:
18589 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18590 {
18591 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18592 smarter to encode their opposite. The DW_OP_plus_uconst operation
18593 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18594 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18595 bytes, Y being the size of the operation that pushes the opposite
18596 of the addend. So let's choose the smallest representation. */
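	  /* For instance, with an addend of -1 (and a 64-bit HOST_WIDE_INT),
	     DW_OP_plus_uconst needs a 10-byte ULEB128 operand (11 bytes in
	     total), whereas DW_OP_lit1 DW_OP_minus only takes 2 bytes.  */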
18597 const tree tree_addend = TREE_OPERAND (loc, 1);
18598 offset_int wi_addend;
18599 HOST_WIDE_INT shwi_addend;
18600 dw_loc_descr_ref loc_naddend;
18601
18602 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18603 if (list_ret == 0)
18604 return 0;
18605
18606 /* Try to get the literal to push. It is the opposite of the addend,
18607 so as we rely on wrapping during DWARF evaluation, first decode
18608 the literal as a "DWARF-sized" signed number. */
18609 wi_addend = wi::to_offset (tree_addend);
18610 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18611 shwi_addend = wi_addend.to_shwi ();
18612 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18613 ? int_loc_descriptor (-shwi_addend)
18614 : NULL;
18615
18616 if (loc_naddend != NULL
18617 && ((unsigned) size_of_uleb128 (shwi_addend)
18618 > size_of_loc_descr (loc_naddend)))
18619 {
18620 add_loc_descr_to_each (list_ret, loc_naddend);
18621 add_loc_descr_to_each (list_ret,
18622 new_loc_descr (DW_OP_minus, 0, 0));
18623 }
18624 else
18625 {
18626 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18627 {
18628 loc_naddend = loc_cur;
18629 loc_cur = loc_cur->dw_loc_next;
18630 ggc_free (loc_naddend);
18631 }
18632 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18633 }
18634 break;
18635 }
18636
18637 op = DW_OP_plus;
18638 goto do_binop;
18639
18640 case LE_EXPR:
18641 op = DW_OP_le;
18642 goto do_comp_binop;
18643
18644 case GE_EXPR:
18645 op = DW_OP_ge;
18646 goto do_comp_binop;
18647
18648 case LT_EXPR:
18649 op = DW_OP_lt;
18650 goto do_comp_binop;
18651
18652 case GT_EXPR:
18653 op = DW_OP_gt;
18654 goto do_comp_binop;
18655
18656 do_comp_binop:
18657 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18658 {
18659 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18660 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18661 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18662 TREE_CODE (loc));
18663 break;
18664 }
18665 else
18666 goto do_binop;
18667
18668 case EQ_EXPR:
18669 op = DW_OP_eq;
18670 goto do_binop;
18671
18672 case NE_EXPR:
18673 op = DW_OP_ne;
18674 goto do_binop;
18675
18676 do_binop:
18677 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18678 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18679 if (list_ret == 0 || list_ret1 == 0)
18680 return 0;
18681
18682 add_loc_list (&list_ret, list_ret1);
18683 if (list_ret == 0)
18684 return 0;
18685 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18686 break;
18687
18688 case TRUTH_NOT_EXPR:
18689 case BIT_NOT_EXPR:
18690 op = DW_OP_not;
18691 goto do_unop;
18692
18693 case ABS_EXPR:
18694 op = DW_OP_abs;
18695 goto do_unop;
18696
18697 case NEGATE_EXPR:
18698 op = DW_OP_neg;
18699 goto do_unop;
18700
18701 do_unop:
18702 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18703 if (list_ret == 0)
18704 return 0;
18705
18706 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18707 break;
18708
18709 case MIN_EXPR:
18710 case MAX_EXPR:
18711 {
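	/* Lower MIN_EXPR/MAX_EXPR to an equivalent COND_EXPR: MIN (a, b)
	   becomes a > b ? b : a and MAX (a, b) becomes a < b ? b : a, then
	   fall through to the COND_EXPR handling below.  */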
18712 const enum tree_code code =
18713 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18714
18715 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18716 build2 (code, integer_type_node,
18717 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18718 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18719 }
18720
18721 /* fall through */
18722
18723 case COND_EXPR:
18724 {
18725 dw_loc_descr_ref lhs
18726 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18727 dw_loc_list_ref rhs
18728 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18729 dw_loc_descr_ref bra_node, jump_node, tmp;
18730
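	/* The expression built below is laid out as:

	     <condition> DW_OP_bra L1 <else-value> DW_OP_skip L2
	     L1: <then-value> L2: DW_OP_nop

	   where DW_OP_bra transfers control to L1 when the condition
	   evaluates to a non-zero value.  */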
18731 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18732 if (list_ret == 0 || lhs == 0 || rhs == 0)
18733 return 0;
18734
18735 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18736 add_loc_descr_to_each (list_ret, bra_node);
18737
18738 add_loc_list (&list_ret, rhs);
18739 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18740 add_loc_descr_to_each (list_ret, jump_node);
18741
18742 add_loc_descr_to_each (list_ret, lhs);
18743 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18744 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18745
18746 /* ??? Need a node to point the skip at. Use a nop. */
18747 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18748 add_loc_descr_to_each (list_ret, tmp);
18749 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18750 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18751 }
18752 break;
18753
18754 case FIX_TRUNC_EXPR:
18755 return 0;
18756
18757 default:
18758 /* Leave front-end specific codes as simply unknown. This comes
18759 up, for instance, with the C STMT_EXPR. */
18760 if ((unsigned int) TREE_CODE (loc)
18761 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18762 {
18763 expansion_failed (loc, NULL_RTX,
18764 "language specific tree node");
18765 return 0;
18766 }
18767
18768 /* Otherwise this is a generic code; we should just list all of
18769 these explicitly. We forgot one. */
18770 if (flag_checking)
18771 gcc_unreachable ();
18772
18773 /* In a release build, we want to degrade gracefully: better to
18774 generate incomplete debugging information than to crash. */
18775 return NULL;
18776 }
18777
18778 if (!ret && !list_ret)
18779 return 0;
18780
18781 if (want_address == 2 && !have_address
18782 && (dwarf_version >= 4 || !dwarf_strict))
18783 {
18784 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18785 {
18786 expansion_failed (loc, NULL_RTX,
18787 "DWARF address size mismatch");
18788 return 0;
18789 }
18790 if (ret)
18791 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18792 else
18793 add_loc_descr_to_each (list_ret,
18794 new_loc_descr (DW_OP_stack_value, 0, 0));
18795 have_address = 1;
18796 }
18797 /* Show if we can't fill the request for an address. */
18798 if (want_address && !have_address)
18799 {
18800 expansion_failed (loc, NULL_RTX,
18801 "Want address and only have value");
18802 return 0;
18803 }
18804
18805 gcc_assert (!ret || !list_ret);
18806
18807 /* If we've got an address and don't want one, dereference. */
18808 if (!want_address && have_address)
18809 {
18810 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18811
18812 if (size > DWARF2_ADDR_SIZE || size == -1)
18813 {
18814 expansion_failed (loc, NULL_RTX,
18815 "DWARF address size mismatch");
18816 return 0;
18817 }
18818 else if (size == DWARF2_ADDR_SIZE)
18819 op = DW_OP_deref;
18820 else
18821 op = DW_OP_deref_size;
18822
18823 if (ret)
18824 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18825 else
18826 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18827 }
18828 if (ret)
18829 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18830
18831 return list_ret;
18832 }
18833
18834 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18835 expressions. */
18836
18837 static dw_loc_list_ref
18838 loc_list_from_tree (tree loc, int want_address,
18839 struct loc_descr_context *context)
18840 {
18841 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18842
18843 for (dw_loc_list_ref loc_cur = result;
18844 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18845 loc_descr_without_nops (loc_cur->expr);
18846 return result;
18847 }
18848
18849 /* Same as above but return only single location expression. */
18850 static dw_loc_descr_ref
18851 loc_descriptor_from_tree (tree loc, int want_address,
18852 struct loc_descr_context *context)
18853 {
18854 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18855 if (!ret)
18856 return NULL;
18857 if (ret->dw_loc_next)
18858 {
18859 expansion_failed (loc, NULL_RTX,
18860 "Location list where only loc descriptor needed");
18861 return NULL;
18862 }
18863 return ret->expr;
18864 }
18865
18866 /* Given a value, round it up to the lowest multiple of `boundary'
18867 which is not less than the value itself. */
18868
18869 static inline HOST_WIDE_INT
18870 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18871 {
18872 return (((value + boundary - 1) / boundary) * boundary);
18873 }
18874
18875 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18876 pointer to the declared type for the relevant field variable, or return
18877 `integer_type_node' if the given node turns out to be an
18878 ERROR_MARK node. */
18879
18880 static inline tree
18881 field_type (const_tree decl)
18882 {
18883 tree type;
18884
18885 if (TREE_CODE (decl) == ERROR_MARK)
18886 return integer_type_node;
18887
18888 type = DECL_BIT_FIELD_TYPE (decl);
18889 if (type == NULL_TREE)
18890 type = TREE_TYPE (decl);
18891
18892 return type;
18893 }
18894
18895 /* Given a pointer to a tree node, return the alignment in bits for
18896 it, or else return BITS_PER_WORD if the node actually turns out to
18897 be an ERROR_MARK node. */
18898
18899 static inline unsigned
18900 simple_type_align_in_bits (const_tree type)
18901 {
18902 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18903 }
18904
18905 static inline unsigned
18906 simple_decl_align_in_bits (const_tree decl)
18907 {
18908 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18909 }
18910
18911 /* Return the result of rounding T up to ALIGN. */
18912
18913 static inline offset_int
18914 round_up_to_align (const offset_int &t, unsigned int align)
18915 {
18916 return wi::udiv_trunc (t + align - 1, align) * align;
18917 }
18918
18919 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18920 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18921 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18922 if we fail to return the size in one of these two forms. */
18923
18924 static dw_loc_descr_ref
18925 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18926 {
18927 tree tree_size;
18928 struct loc_descr_context ctx;
18929
18930 /* Return a constant integer in priority, if possible. */
18931 *cst_size = int_size_in_bytes (type);
18932 if (*cst_size != -1)
18933 return NULL;
18934
18935 ctx.context_type = const_cast<tree> (type);
18936 ctx.base_decl = NULL_TREE;
18937 ctx.dpi = NULL;
18938 ctx.placeholder_arg = false;
18939 ctx.placeholder_seen = false;
18940
18941 type = TYPE_MAIN_VARIANT (type);
18942 tree_size = TYPE_SIZE_UNIT (type);
18943 return ((tree_size != NULL_TREE)
18944 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18945 : NULL);
18946 }
18947
18948 /* Helper structure for RECORD_TYPE processing. */
18949 struct vlr_context
18950 {
18951 /* Root RECORD_TYPE. It is needed to generate data member location
18952 descriptions in variable-length records (VLR), but also to cope with
18953 variants, which are composed of nested structures multiplexed with
18954 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18955 function processing a FIELD_DECL, it is required to be non-null. */
18956 tree struct_type;
18957 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18958 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18959 this variant part as part of the root record (in storage units). For
18960 regular records, it must be NULL_TREE. */
18961 tree variant_part_offset;
18962 };
18963
18964 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18965 addressed byte of the "containing object" for the given FIELD_DECL. If
18966 possible, return a native constant through CST_OFFSET (in which case NULL is
18967 returned); otherwise return a DWARF expression that computes the offset.
18968
18969 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18970 that offset is, either because the argument turns out to be a pointer to an
18971 ERROR_MARK node, or because the offset expression is too complex for us.
18972
18973 CTX is required: see the comment for VLR_CONTEXT. */
18974
18975 static dw_loc_descr_ref
18976 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18977 HOST_WIDE_INT *cst_offset)
18978 {
18979 tree tree_result;
18980 dw_loc_list_ref loc_result;
18981
18982 *cst_offset = 0;
18983
18984 if (TREE_CODE (decl) == ERROR_MARK)
18985 return NULL;
18986 else
18987 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18988
18989 /* We cannot handle variable bit offsets at the moment, so abort if it's the
18990 case. */
18991 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18992 return NULL;
18993
18994 #ifdef PCC_BITFIELD_TYPE_MATTERS
18995 /* We used to handle only constant offsets in all cases. Now, we properly
18996 handle dynamic byte offsets only when the PCC bitfield type rule doesn't
18997 matter. */
18998 if (PCC_BITFIELD_TYPE_MATTERS
18999 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19000 {
19001 offset_int object_offset_in_bits;
19002 offset_int object_offset_in_bytes;
19003 offset_int bitpos_int;
19004 tree type;
19005 tree field_size_tree;
19006 offset_int deepest_bitpos;
19007 offset_int field_size_in_bits;
19008 unsigned int type_align_in_bits;
19009 unsigned int decl_align_in_bits;
19010 offset_int type_size_in_bits;
19011
19012 bitpos_int = wi::to_offset (bit_position (decl));
19013 type = field_type (decl);
19014 type_size_in_bits = offset_int_type_size_in_bits (type);
19015 type_align_in_bits = simple_type_align_in_bits (type);
19016
19017 field_size_tree = DECL_SIZE (decl);
19018
19019 /* The size could be unspecified if there was an error, or for
19020 a flexible array member. */
19021 if (!field_size_tree)
19022 field_size_tree = bitsize_zero_node;
19023
19024 /* If the size of the field is not constant, use the type size. */
19025 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19026 field_size_in_bits = wi::to_offset (field_size_tree);
19027 else
19028 field_size_in_bits = type_size_in_bits;
19029
19030 decl_align_in_bits = simple_decl_align_in_bits (decl);
19031
19032 /* The GCC front-end doesn't make any attempt to keep track of the
19033 starting bit offset (relative to the start of the containing
19034 structure type) of the hypothetical "containing object" for a
19035 bit-field. Thus, when computing the byte offset value for the
19036 start of the "containing object" of a bit-field, we must deduce
19037 this information on our own. This can be rather tricky to do in
19038 some cases. For example, handling the following structure type
19039 definition when compiling for an i386/i486 target (which only
19040 aligns long long's to 32-bit boundaries) can be very tricky:
19041
19042 struct S { int field1; long long field2:31; };
19043
19044 Fortunately, there is a simple rule-of-thumb which can be used
19045 in such cases. When compiling for an i386/i486, GCC will
19046 allocate 8 bytes for the structure shown above. It decides to
19047 do this based upon one simple rule for bit-field allocation.
19048 GCC allocates each "containing object" for each bit-field at
19049 the first (i.e. lowest addressed) legitimate alignment boundary
19050 (based upon the required minimum alignment for the declared
19051 type of the field) which it can possibly use, subject to the
19052 condition that there is still enough available space remaining
19053 in the containing object (when allocated at the selected point)
19054 to fully accommodate all of the bits of the bit-field itself.
19055
19056 This simple rule makes it obvious why GCC allocates 8 bytes for
19057 each object of the structure type shown above. When looking
19058 for a place to allocate the "containing object" for `field2',
19059 the compiler simply tries to allocate a 64-bit "containing
19060 object" at each successive 32-bit boundary (starting at zero)
19061 until it finds a place to allocate that 64-bit field such that
19062 at least 31 contiguous (and previously unallocated) bits remain
19063 within that selected 64 bit field. (As it turns out, for the
19064 example above, the compiler finds it is OK to allocate the
19065 "containing object" 64-bit field at bit-offset zero within the
19066 structure type.)
19067
19068 Here we attempt to work backwards from the limited set of facts
19069 we're given, and we try to deduce from those facts, where GCC
19070 must have believed that the containing object started (within
19071 the structure type). The value we deduce is then used (by the
19072 callers of this routine) to generate DW_AT_location and
19073 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19074 the case of DW_AT_location, regular fields as well). */
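	 /* Applying this to the struct S example above (i386): field2 has
	    bit position 32 and DECL_SIZE 31, its declared type is 64 bits
	    wide but only 32-bit aligned, so deepest_bitpos = 32 + 31 = 63,
	    object_offset_in_bits = 63 - 64 = -1, which rounds up to 0;
	    since 0 does not exceed the bit position, the "containing object"
	    is deduced to start at byte 0 of the structure.  */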
19075
19076 /* Figure out the bit-distance from the start of the structure to
19077 the "deepest" bit of the bit-field. */
19078 deepest_bitpos = bitpos_int + field_size_in_bits;
19079
19080 /* This is the tricky part. Use some fancy footwork to deduce
19081 where the lowest addressed bit of the containing object must
19082 be. */
19083 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19084
19085 /* Round up to type_align by default. This works best for
19086 bitfields. */
19087 object_offset_in_bits
19088 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19089
19090 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19091 {
19092 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19093
19094 /* Round up to decl_align instead. */
19095 object_offset_in_bits
19096 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19097 }
19098
19099 object_offset_in_bytes
19100 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19101 if (ctx->variant_part_offset == NULL_TREE)
19102 {
19103 *cst_offset = object_offset_in_bytes.to_shwi ();
19104 return NULL;
19105 }
19106 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19107 }
19108 else
19109 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19110 tree_result = byte_position (decl);
19111
19112 if (ctx->variant_part_offset != NULL_TREE)
19113 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19114 ctx->variant_part_offset, tree_result);
19115
19116 /* If the byte offset is a constant, it's simpler to handle a native
19117 constant rather than a DWARF expression. */
19118 if (TREE_CODE (tree_result) == INTEGER_CST)
19119 {
19120 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19121 return NULL;
19122 }
19123 struct loc_descr_context loc_ctx = {
19124 ctx->struct_type, /* context_type */
19125 NULL_TREE, /* base_decl */
19126 NULL, /* dpi */
19127 false, /* placeholder_arg */
19128 false /* placeholder_seen */
19129 };
19130 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19131
19132 /* We want a DWARF expression: abort if we only have a location list with
19133 multiple elements. */
19134 if (!loc_result || !single_element_loc_list_p (loc_result))
19135 return NULL;
19136 else
19137 return loc_result->expr;
19138 }
19139 \f
19140 /* The following routines define various Dwarf attributes and any data
19141 associated with them. */
19142
19143 /* Add a location description attribute value to a DIE.
19144
19145 This emits location attributes suitable for whole variables and
19146 whole parameters. Note that the location attributes for struct fields are
19147 generated by the routine `data_member_location_attribute' below. */
19148
19149 static inline void
19150 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19151 dw_loc_list_ref descr)
19152 {
19153 bool check_no_locviews = true;
19154 if (descr == 0)
19155 return;
19156 if (single_element_loc_list_p (descr))
19157 add_AT_loc (die, attr_kind, descr->expr);
19158 else
19159 {
19160 add_AT_loc_list (die, attr_kind, descr);
19161 gcc_assert (descr->ll_symbol);
19162 if (attr_kind == DW_AT_location && descr->vl_symbol
19163 && dwarf2out_locviews_in_attribute ())
19164 {
19165 add_AT_view_list (die, DW_AT_GNU_locviews);
19166 check_no_locviews = false;
19167 }
19168 }
19169
19170 if (check_no_locviews)
19171 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19172 }
19173
19174 /* Add DW_AT_accessibility attribute to DIE if needed. */
19175
19176 static void
19177 add_accessibility_attribute (dw_die_ref die, tree decl)
19178 {
19179 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19180 children, otherwise the default is DW_ACCESS_public. In DWARF2
19181 the default has always been DW_ACCESS_public. */
19182 if (TREE_PROTECTED (decl))
19183 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19184 else if (TREE_PRIVATE (decl))
19185 {
19186 if (dwarf_version == 2
19187 || die->die_parent == NULL
19188 || die->die_parent->die_tag != DW_TAG_class_type)
19189 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19190 }
19191 else if (dwarf_version > 2
19192 && die->die_parent
19193 && die->die_parent->die_tag == DW_TAG_class_type)
19194 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19195 }
19196
19197 /* Attach the specialized form of location attribute used for data members of
19198 struct and union types. In the special case of a FIELD_DECL node which
19199 represents a bit-field, the "offset" part of this special location
19200 descriptor must indicate the distance in bytes from the lowest-addressed
19201 byte of the containing struct or union type to the lowest-addressed byte of
19202 the "containing object" for the bit-field. (See the `field_byte_offset'
19203 function above).
19204
19205 For any given bit-field, the "containing object" is a hypothetical object
19206 (of some integral or enum type) within which the given bit-field lives. The
19207 type of this hypothetical "containing object" is always the same as the
19208 declared type of the individual bit-field itself (for GCC anyway... the
19209 DWARF spec doesn't actually mandate this). Note that it is the size (in
19210 bytes) of the hypothetical "containing object" which will be given in the
19211 DW_AT_byte_size attribute for this bit-field. (See the
19212 `add_byte_size_attribute' function below.) It is also used when calculating
19213 the value of the DW_AT_bit_offset attribute. (See the `add_bit_offset_attribute'
19214 function below.)
19215
19216 CTX is required: see the comment for VLR_CONTEXT. */
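   /* An illustrative sketch of the bit-field case (layout details are
      target-dependent; assume a little-endian target, 4-byte int and no
      packing):

          struct s { int a; unsigned b : 5; };

      Here the hypothetical "containing object" for B is an unsigned int
      that would typically occupy bytes 4..7 of the structure, so the byte
      offset emitted for B is 4 and its DW_AT_byte_size is 4; the bit
      offset is then computed relative to that containing int.  */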
19217
19218 static void
19219 add_data_member_location_attribute (dw_die_ref die,
19220 tree decl,
19221 struct vlr_context *ctx)
19222 {
19223 HOST_WIDE_INT offset;
19224 dw_loc_descr_ref loc_descr = 0;
19225
19226 if (TREE_CODE (decl) == TREE_BINFO)
19227 {
19228 /* We're working on the TAG_inheritance for a base class. */
19229 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19230 {
19231 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19232 aren't at a fixed offset from all (sub)objects of the same
19233 type. We need to extract the appropriate offset from our
19234 vtable. The following dwarf expression means
19235
19236 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19237
19238 This is specific to the V3 ABI, of course. */
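         /* A sketch of the expression built below, showing the DWARF stack
            after each step (ObAddr is on the stack on entry; the exact
            opcode pushing -Offset is whatever int_loc_descriptor picks):

               DW_OP_dup        ObAddr ObAddr
               DW_OP_deref      ObAddr vptr
               <push -Offset>   ObAddr vptr -Offset
               DW_OP_minus      ObAddr vptr+Offset   (Offset is negative)
               DW_OP_deref      ObAddr vbase_offset
               DW_OP_plus       ObAddr+vbase_offset = BaseAddr  */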
19239
19240 dw_loc_descr_ref tmp;
19241
19242 /* Make a copy of the object address. */
19243 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19244 add_loc_descr (&loc_descr, tmp);
19245
19246 /* Extract the vtable address. */
19247 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19248 add_loc_descr (&loc_descr, tmp);
19249
19250 /* Calculate the address of the offset. */
19251 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19252 gcc_assert (offset < 0);
19253
19254 tmp = int_loc_descriptor (-offset);
19255 add_loc_descr (&loc_descr, tmp);
19256 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19257 add_loc_descr (&loc_descr, tmp);
19258
19259 /* Extract the offset. */
19260 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19261 add_loc_descr (&loc_descr, tmp);
19262
19263 /* Add it to the object address. */
19264 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19265 add_loc_descr (&loc_descr, tmp);
19266 }
19267 else
19268 offset = tree_to_shwi (BINFO_OFFSET (decl));
19269 }
19270 else
19271 {
19272 loc_descr = field_byte_offset (decl, ctx, &offset);
19273
19274 /* If loc_descr is available then we know the field offset is dynamic.
19275 However, GDB does not handle dynamic field offsets very well at the
19276 moment. */
19277 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19278 {
19279 loc_descr = NULL;
19280 offset = 0;
19281 }
19282
19283 /* Data member location evaluation starts with the base address on the
19284 stack. Compute the field offset and add it to this base address. */
19285 else if (loc_descr != NULL)
19286 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19287 }
19288
19289 if (! loc_descr)
19290 {
19291 /* While DW_AT_data_bit_offset was already added in DWARF4, consumer
19292 support came late; GDB, e.g., only added it in November 2016. For DWARF5
19293 we need newer debug info consumers anyway. We might change this
19294 to dwarf_version >= 4 once most consumers have caught up. */
19295 if (dwarf_version >= 5
19296 && TREE_CODE (decl) == FIELD_DECL
19297 && DECL_BIT_FIELD_TYPE (decl))
19298 {
19299 tree off = bit_position (decl);
19300 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19301 {
19302 remove_AT (die, DW_AT_byte_size);
19303 remove_AT (die, DW_AT_bit_offset);
19304 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19305 return;
19306 }
19307 }
19308 if (dwarf_version > 2)
19309 {
19310 /* Don't need to output a location expression, just the constant. */
19311 if (offset < 0)
19312 add_AT_int (die, DW_AT_data_member_location, offset);
19313 else
19314 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19315 return;
19316 }
19317 else
19318 {
19319 enum dwarf_location_atom op;
19320
19321 /* The DWARF2 standard says that we should assume that the structure
19322 address is already on the stack, so we can specify a structure
19323 field address by using DW_OP_plus_uconst. */
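            /* For example, a member at byte offset 8 would be described by
               the single-operation expression DW_OP_plus_uconst 8, applied
               to the structure address assumed to be on the stack.  */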
19324 op = DW_OP_plus_uconst;
19325 loc_descr = new_loc_descr (op, offset, 0);
19326 }
19327 }
19328
19329 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19330 }
19331
19332 /* Writes integer values to dw_vec_const array. */
19333
19334 static void
19335 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19336 {
19337 while (size != 0)
19338 {
19339 *dest++ = val & 0xff;
19340 val >>= 8;
19341 --size;
19342 }
19343 }
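/* For illustration: insert_int (0x1234, 2, dest) stores dest[0] = 0x34
   and dest[1] = 0x12, i.e. the value is serialized least significant
   byte first; extract_int below performs the inverse operation.  */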
19344
19345 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19346
19347 static HOST_WIDE_INT
19348 extract_int (const unsigned char *src, unsigned int size)
19349 {
19350 HOST_WIDE_INT val = 0;
19351
19352 src += size;
19353 while (size != 0)
19354 {
19355 val <<= 8;
19356 val |= *--src & 0xff;
19357 --size;
19358 }
19359 return val;
19360 }
19361
19362 /* Writes wide_int values to dw_vec_const array. */
19363
19364 static void
19365 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19366 {
19367 int i;
19368
19369 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19370 {
19371 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19372 return;
19373 }
19374
19375 /* We'd have to extend this code to support odd sizes. */
19376 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19377
19378 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19379
19380 if (WORDS_BIG_ENDIAN)
19381 for (i = n - 1; i >= 0; i--)
19382 {
19383 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19384 dest += sizeof (HOST_WIDE_INT);
19385 }
19386 else
19387 for (i = 0; i < n; i++)
19388 {
19389 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19390 dest += sizeof (HOST_WIDE_INT);
19391 }
19392 }
19393
19394 /* Writes floating point values to dw_vec_const array. */
19395
19396 static void
19397 insert_float (const_rtx rtl, unsigned char *array)
19398 {
19399 long val[4];
19400 int i;
19401 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19402
19403 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19404
19405 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19406 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19407 {
19408 insert_int (val[i], 4, array);
19409 array += 4;
19410 }
19411 }
19412
19413 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19414 does not have a "location" either in memory or in a register. These
19415 things can arise in GNU C when a constant is passed as an actual parameter
19416 to an inlined function. They can also arise in C++ where declared
19417 constants do not necessarily get memory "homes". */
19418
19419 static bool
19420 add_const_value_attribute (dw_die_ref die, rtx rtl)
19421 {
19422 switch (GET_CODE (rtl))
19423 {
19424 case CONST_INT:
19425 {
19426 HOST_WIDE_INT val = INTVAL (rtl);
19427
19428 if (val < 0)
19429 add_AT_int (die, DW_AT_const_value, val);
19430 else
19431 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19432 }
19433 return true;
19434
19435 case CONST_WIDE_INT:
19436 {
19437 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19438 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19439 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19440 wide_int w = wi::zext (w1, prec);
19441 add_AT_wide (die, DW_AT_const_value, w);
19442 }
19443 return true;
19444
19445 case CONST_DOUBLE:
19446 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19447 floating-point constant. A CONST_DOUBLE is used whenever the
19448 constant requires more than one word in order to be adequately
19449 represented. */
19450 if (TARGET_SUPPORTS_WIDE_INT == 0
19451 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19452 add_AT_double (die, DW_AT_const_value,
19453 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19454 else
19455 {
19456 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19457 unsigned int length = GET_MODE_SIZE (mode);
19458 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19459
19460 insert_float (rtl, array);
19461 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19462 }
19463 return true;
19464
19465 case CONST_VECTOR:
19466 {
19467 unsigned int length;
19468 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19469 return false;
19470
19471 machine_mode mode = GET_MODE (rtl);
19472 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19473 unsigned char *array
19474 = ggc_vec_alloc<unsigned char> (length * elt_size);
19475 unsigned int i;
19476 unsigned char *p;
19477 machine_mode imode = GET_MODE_INNER (mode);
19478
19479 switch (GET_MODE_CLASS (mode))
19480 {
19481 case MODE_VECTOR_INT:
19482 for (i = 0, p = array; i < length; i++, p += elt_size)
19483 {
19484 rtx elt = CONST_VECTOR_ELT (rtl, i);
19485 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19486 }
19487 break;
19488
19489 case MODE_VECTOR_FLOAT:
19490 for (i = 0, p = array; i < length; i++, p += elt_size)
19491 {
19492 rtx elt = CONST_VECTOR_ELT (rtl, i);
19493 insert_float (elt, p);
19494 }
19495 break;
19496
19497 default:
19498 gcc_unreachable ();
19499 }
19500
19501 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19502 }
19503 return true;
19504
19505 case CONST_STRING:
19506 if (dwarf_version >= 4 || !dwarf_strict)
19507 {
19508 dw_loc_descr_ref loc_result;
19509 resolve_one_addr (&rtl);
19510 rtl_addr:
19511 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19512 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19513 add_AT_loc (die, DW_AT_location, loc_result);
19514 vec_safe_push (used_rtx_array, rtl);
19515 return true;
19516 }
19517 return false;
19518
19519 case CONST:
19520 if (CONSTANT_P (XEXP (rtl, 0)))
19521 return add_const_value_attribute (die, XEXP (rtl, 0));
19522 /* FALLTHROUGH */
19523 case SYMBOL_REF:
19524 if (!const_ok_for_output (rtl))
19525 return false;
19526 /* FALLTHROUGH */
19527 case LABEL_REF:
19528 if (dwarf_version >= 4 || !dwarf_strict)
19529 goto rtl_addr;
19530 return false;
19531
19532 case PLUS:
19533 /* In cases where an inlined instance of an inline function is passed
19534 the address of an `auto' variable (which is local to the caller) we
19535 can get a situation where the DECL_RTL of the artificial local
19536 variable (for the inlining) which acts as a stand-in for the
19537 corresponding formal parameter (of the inline function) will look
19538 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19539 exactly a compile-time constant expression, but it isn't the address
19540 of the (artificial) local variable either. Rather, it represents the
19541 *value* which the artificial local variable always has during its
19542 lifetime. We currently have no way to represent such quasi-constant
19543 values in Dwarf, so for now we just punt and generate nothing. */
19544 return false;
19545
19546 case HIGH:
19547 case CONST_FIXED:
19548 return false;
19549
19550 case MEM:
19551 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19552 && MEM_READONLY_P (rtl)
19553 && GET_MODE (rtl) == BLKmode)
19554 {
19555 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19556 return true;
19557 }
19558 return false;
19559
19560 default:
19561 /* No other kinds of rtx should be possible here. */
19562 gcc_unreachable ();
19563 }
19564 return false;
19565 }
19566
19567 /* Determine whether the evaluation of EXPR references any variables
19568 or functions which aren't otherwise used (and therefore may not be
19569 output). */
19570 static tree
19571 reference_to_unused (tree * tp, int * walk_subtrees,
19572 void * data ATTRIBUTE_UNUSED)
19573 {
19574 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19575 *walk_subtrees = 0;
19576
19577 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19578 && ! TREE_ASM_WRITTEN (*tp))
19579 return *tp;
19580 /* ??? The C++ FE emits debug information for using decls, so
19581 putting gcc_unreachable here falls over. See PR31899. For now
19582 be conservative. */
19583 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19584 return *tp;
19585 else if (VAR_P (*tp))
19586 {
19587 varpool_node *node = varpool_node::get (*tp);
19588 if (!node || !node->definition)
19589 return *tp;
19590 }
19591 else if (TREE_CODE (*tp) == FUNCTION_DECL
19592 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19593 {
19594 /* The call graph machinery must have finished analyzing,
19595 optimizing and gimplifying the CU by now.
19596 So if *TP has no call graph node associated
19597 to it, it means *TP will not be emitted. */
19598 if (!cgraph_node::get (*tp))
19599 return *tp;
19600 }
19601 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19602 return *tp;
19603
19604 return NULL_TREE;
19605 }
19606
19607 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19608 for use in a later add_const_value_attribute call. */
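/* For instance (a sketch, independent of any particular target), a
   declaration like

     static const char greeting[6] = "hello";

   has a STRING_CST initializer without embedded zeros, so the code below
   wraps a CONST_STRING in a read-only BLKmode MEM, which
   add_const_value_attribute can later turn into a DW_AT_const_value
   string.  */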
19609
19610 static rtx
19611 rtl_for_decl_init (tree init, tree type)
19612 {
19613 rtx rtl = NULL_RTX;
19614
19615 STRIP_NOPS (init);
19616
19617 /* If a variable is initialized with a string constant without embedded
19618 zeros, build CONST_STRING. */
19619 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19620 {
19621 tree enttype = TREE_TYPE (type);
19622 tree domain = TYPE_DOMAIN (type);
19623 scalar_int_mode mode;
19624
19625 if (is_int_mode (TYPE_MODE (enttype), &mode)
19626 && GET_MODE_SIZE (mode) == 1
19627 && domain
19628 && TYPE_MAX_VALUE (domain)
19629 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19630 && integer_zerop (TYPE_MIN_VALUE (domain))
19631 && compare_tree_int (TYPE_MAX_VALUE (domain),
19632 TREE_STRING_LENGTH (init) - 1) == 0
19633 && ((size_t) TREE_STRING_LENGTH (init)
19634 == strlen (TREE_STRING_POINTER (init)) + 1))
19635 {
19636 rtl = gen_rtx_CONST_STRING (VOIDmode,
19637 ggc_strdup (TREE_STRING_POINTER (init)));
19638 rtl = gen_rtx_MEM (BLKmode, rtl);
19639 MEM_READONLY_P (rtl) = 1;
19640 }
19641 }
19642 /* Other aggregates, and complex values, could be represented using
19643 CONCAT: FIXME! */
19644 else if (AGGREGATE_TYPE_P (type)
19645 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19646 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19647 || TREE_CODE (type) == COMPLEX_TYPE)
19648 ;
19649 /* Vectors only work if their mode is supported by the target.
19650 FIXME: generic vectors ought to work too. */
19651 else if (TREE_CODE (type) == VECTOR_TYPE
19652 && !VECTOR_MODE_P (TYPE_MODE (type)))
19653 ;
19654 /* If the initializer is something that we know will expand into an
19655 immediate RTL constant, expand it now. We must be careful not to
19656 reference variables which won't be output. */
19657 else if (initializer_constant_valid_p (init, type)
19658 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19659 {
19660 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19661 possible. */
19662 if (TREE_CODE (type) == VECTOR_TYPE)
19663 switch (TREE_CODE (init))
19664 {
19665 case VECTOR_CST:
19666 break;
19667 case CONSTRUCTOR:
19668 if (TREE_CONSTANT (init))
19669 {
19670 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19671 bool constant_p = true;
19672 tree value;
19673 unsigned HOST_WIDE_INT ix;
19674
19675 /* Even when ctor is constant, it might contain non-*_CST
19676 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19677 belong into VECTOR_CST nodes. */
19678 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19679 if (!CONSTANT_CLASS_P (value))
19680 {
19681 constant_p = false;
19682 break;
19683 }
19684
19685 if (constant_p)
19686 {
19687 init = build_vector_from_ctor (type, elts);
19688 break;
19689 }
19690 }
19691 /* FALLTHRU */
19692
19693 default:
19694 return NULL;
19695 }
19696
19697 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19698
19699 /* If expand_expr returns a MEM, it wasn't immediate. */
19700 gcc_assert (!rtl || !MEM_P (rtl));
19701 }
19702
19703 return rtl;
19704 }
19705
19706 /* Generate RTL for the variable DECL to represent its location. */
19707
19708 static rtx
19709 rtl_for_decl_location (tree decl)
19710 {
19711 rtx rtl;
19712
19713 /* Here we have to decide where we are going to say the parameter "lives"
19714 (as far as the debugger is concerned). We only have a couple of
19715 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19716
19717 DECL_RTL normally indicates where the parameter lives during most of the
19718 activation of the function. If optimization is enabled however, this
19719 could be either NULL or else a pseudo-reg. Both of those cases indicate
19720 that the parameter doesn't really live anywhere (as far as the code
19721 generation parts of GCC are concerned) during most of the function's
19722 activation. That will happen (for example) if the parameter is never
19723 referenced within the function.
19724
19725 We could just generate a location descriptor here for all non-NULL
19726 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19727 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19728 where DECL_RTL is NULL or is a pseudo-reg.
19729
19730 Note however that we can only get away with using DECL_INCOMING_RTL as
19731 a backup substitute for DECL_RTL in certain limited cases. In cases
19732 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19733 we can be sure that the parameter was passed using the same type as it is
19734 declared to have within the function, and that its DECL_INCOMING_RTL
19735 points us to a place where a value of that type is passed.
19736
19737 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19738 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19739 because in these cases DECL_INCOMING_RTL points us to a value of some
19740 type which is *different* from the type of the parameter itself. Thus,
19741 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19742 such cases, the debugger would end up (for example) trying to fetch a
19743 `float' from a place which actually contains the first part of a
19744 `double'. That would lead to really incorrect and confusing
19745 output at debug-time.
19746
19747 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19748 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19749 are a couple of exceptions however. On little-endian machines we can
19750 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19751 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19752 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19753 when (on a little-endian machine) a non-prototyped function has a
19754 parameter declared to be of type `short' or `char'. In such cases,
19755 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19756 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19757 passed `int' value. If the debugger then uses that address to fetch
19758 a `short' or a `char' (on a little-endian machine) the result will be
19759 the correct data, so we allow for such exceptional cases below.
19760
19761 Note that our goal here is to describe the place where the given formal
19762 parameter lives during most of the function's activation (i.e. between the
19763 end of the prologue and the start of the epilogue). We'll do that as best
19764 as we can. Note however that if the given formal parameter is modified
19765 sometime during the execution of the function, then a stack backtrace (at
19766 debug-time) will show the function as having been called with the *new*
19767 value rather than the value which was originally passed in. This happens
19768 rarely enough that it is not a major problem, but it *is* a problem, and
19769 I'd like to fix it.
19770
19771 A future version of dwarf2out.c may generate two additional attributes for
19772 any given DW_TAG_formal_parameter DIE which will describe the "passed
19773 type" and the "passed location" for the given formal parameter in addition
19774 to the attributes we now generate to indicate the "declared type" and the
19775 "active location" for each parameter. This additional set of attributes
19776 could be used by debuggers for stack backtraces. Separately, note that
19777 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19778 This happens (for example) for inlined-instances of inline function formal
19779 parameters which are never referenced. This really shouldn't be
19780 happening. All PARM_DECL nodes should get valid non-NULL
19781 DECL_INCOMING_RTL values. FIXME. */
19782
19783 /* Use DECL_RTL as the "location" unless we find something better. */
19784 rtl = DECL_RTL_IF_SET (decl);
19785
19786 /* When generating abstract instances, ignore everything except
19787 constants, symbols living in memory, and symbols living in
19788 fixed registers. */
19789 if (! reload_completed)
19790 {
19791 if (rtl
19792 && (CONSTANT_P (rtl)
19793 || (MEM_P (rtl)
19794 && CONSTANT_P (XEXP (rtl, 0)))
19795 || (REG_P (rtl)
19796 && VAR_P (decl)
19797 && TREE_STATIC (decl))))
19798 {
19799 rtl = targetm.delegitimize_address (rtl);
19800 return rtl;
19801 }
19802 rtl = NULL_RTX;
19803 }
19804 else if (TREE_CODE (decl) == PARM_DECL)
19805 {
19806 if (rtl == NULL_RTX
19807 || is_pseudo_reg (rtl)
19808 || (MEM_P (rtl)
19809 && is_pseudo_reg (XEXP (rtl, 0))
19810 && DECL_INCOMING_RTL (decl)
19811 && MEM_P (DECL_INCOMING_RTL (decl))
19812 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19813 {
19814 tree declared_type = TREE_TYPE (decl);
19815 tree passed_type = DECL_ARG_TYPE (decl);
19816 machine_mode dmode = TYPE_MODE (declared_type);
19817 machine_mode pmode = TYPE_MODE (passed_type);
19818
19819 /* This decl represents a formal parameter which was optimized out.
19820 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19821 all cases where (rtl == NULL_RTX) just below. */
19822 if (dmode == pmode)
19823 rtl = DECL_INCOMING_RTL (decl);
19824 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19825 && SCALAR_INT_MODE_P (dmode)
19826 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19827 && DECL_INCOMING_RTL (decl))
19828 {
19829 rtx inc = DECL_INCOMING_RTL (decl);
19830 if (REG_P (inc))
19831 rtl = inc;
19832 else if (MEM_P (inc))
19833 {
19834 if (BYTES_BIG_ENDIAN)
19835 rtl = adjust_address_nv (inc, dmode,
19836 GET_MODE_SIZE (pmode)
19837 - GET_MODE_SIZE (dmode));
19838 else
19839 rtl = inc;
19840 }
19841 }
19842 }
19843
19844 /* If the parm was passed in registers, but lives on the stack, then
19845 make a big endian correction if the mode of the type of the
19846 parameter is not the same as the mode of the rtl. */
19847 /* ??? This is the same series of checks that are made in dbxout.c before
19848 we reach the big endian correction code there. It isn't clear if all
19849 of these checks are necessary here, but keeping them all is the safe
19850 thing to do. */
19851 else if (MEM_P (rtl)
19852 && XEXP (rtl, 0) != const0_rtx
19853 && ! CONSTANT_P (XEXP (rtl, 0))
19854 /* Not passed in memory. */
19855 && !MEM_P (DECL_INCOMING_RTL (decl))
19856 /* Not passed by invisible reference. */
19857 && (!REG_P (XEXP (rtl, 0))
19858 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19859 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19860 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19861 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19862 #endif
19863 )
19864 /* Big endian correction check. */
19865 && BYTES_BIG_ENDIAN
19866 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19867 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19868 UNITS_PER_WORD))
19869 {
19870 machine_mode addr_mode = get_address_mode (rtl);
19871 poly_int64 offset = (UNITS_PER_WORD
19872 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19873
19874 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19875 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19876 }
19877 }
19878 else if (VAR_P (decl)
19879 && rtl
19880 && MEM_P (rtl)
19881 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19882 {
19883 machine_mode addr_mode = get_address_mode (rtl);
19884 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19885 GET_MODE (rtl));
19886
19887 /* If a variable is declared "register" yet is smaller than
19888 a register, then if we store the variable to memory, it
19889 looks like we're storing a register-sized value, when in
19890 fact we are not. We need to adjust the offset of the
19891 storage location to reflect the actual value's bytes,
19892 else gdb will not be able to display it. */
19893 if (maybe_ne (offset, 0))
19894 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19895 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19896 }
19897
19898 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19899 and will have been substituted directly into all expressions that use it.
19900 C does not have such a concept, but C++ and other languages do. */
19901 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19902 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19903
19904 if (rtl)
19905 rtl = targetm.delegitimize_address (rtl);
19906
19907 /* If we don't look past the constant pool, we risk emitting a
19908 reference to a constant pool entry that isn't referenced from
19909 code, and thus is not emitted. */
19910 if (rtl)
19911 rtl = avoid_constant_pool_reference (rtl);
19912
19913 /* Try harder to get a rtl. If this symbol ends up not being emitted
19914 in the current CU, resolve_addr will remove the expression referencing
19915 it. */
19916 if (rtl == NULL_RTX
19917 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19918 && VAR_P (decl)
19919 && !DECL_EXTERNAL (decl)
19920 && TREE_STATIC (decl)
19921 && DECL_NAME (decl)
19922 && !DECL_HARD_REGISTER (decl)
19923 && DECL_MODE (decl) != VOIDmode)
19924 {
19925 rtl = make_decl_rtl_for_debug (decl);
19926 if (!MEM_P (rtl)
19927 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19928 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19929 rtl = NULL_RTX;
19930 }
19931
19932 return rtl;
19933 }
19934
19935 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19936 returned. If so, the decl for the COMMON block is returned, and the
19937 value is the offset into the common block for the symbol. */
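/* An illustrative sketch: for a Fortran fragment like

     REAL X, Y
     COMMON /BLK/ X, Y

   the decl for Y has a DECL_VALUE_EXPR that is a COMPONENT_REF into the
   artificial variable representing /BLK/; this function would return that
   variable and set *VALUE to Y's byte offset within the block (4 here,
   assuming 4-byte REALs).  */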
19938
19939 static tree
19940 fortran_common (tree decl, HOST_WIDE_INT *value)
19941 {
19942 tree val_expr, cvar;
19943 machine_mode mode;
19944 poly_int64 bitsize, bitpos;
19945 tree offset;
19946 HOST_WIDE_INT cbitpos;
19947 int unsignedp, reversep, volatilep = 0;
19948
19949 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19950 it does not have a value (the offset into the common area), or if it
19951 is thread local (as opposed to global) then it isn't common, and shouldn't
19952 be handled as such. */
19953 if (!VAR_P (decl)
19954 || !TREE_STATIC (decl)
19955 || !DECL_HAS_VALUE_EXPR_P (decl)
19956 || !is_fortran ())
19957 return NULL_TREE;
19958
19959 val_expr = DECL_VALUE_EXPR (decl);
19960 if (TREE_CODE (val_expr) != COMPONENT_REF)
19961 return NULL_TREE;
19962
19963 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19964 &unsignedp, &reversep, &volatilep);
19965
19966 if (cvar == NULL_TREE
19967 || !VAR_P (cvar)
19968 || DECL_ARTIFICIAL (cvar)
19969 || !TREE_PUBLIC (cvar)
19970 /* We don't expect to have to cope with variable offsets,
19971 since at present all static data must have a constant size. */
19972 || !bitpos.is_constant (&cbitpos))
19973 return NULL_TREE;
19974
19975 *value = 0;
19976 if (offset != NULL)
19977 {
19978 if (!tree_fits_shwi_p (offset))
19979 return NULL_TREE;
19980 *value = tree_to_shwi (offset);
19981 }
19982 if (cbitpos != 0)
19983 *value += cbitpos / BITS_PER_UNIT;
19984
19985 return cvar;
19986 }
19987
19988 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
19989 data attribute for a variable or a parameter. We generate the
19990 DW_AT_const_value attribute only in those cases where the given variable
19991 or parameter does not have a true "location" either in memory or in a
19992 register. This can happen (for example) when a constant is passed as an
19993 actual argument in a call to an inline function. (It's possible that
19994 these things can crop up in other ways also.) Note that one type of
19995 constant value which can be passed into an inlined function is a constant
19996 pointer. This can happen for example if an actual argument in an inlined
19997 function call evaluates to a compile-time constant address.
19998
19999 CACHE_P is true if it is worth caching the location list for DECL,
20000 so that future calls can reuse it rather than regenerate it from scratch.
20001 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20002 since we will need to refer to them each time the function is inlined. */
20003
20004 static bool
20005 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20006 {
20007 rtx rtl;
20008 dw_loc_list_ref list;
20009 var_loc_list *loc_list;
20010 cached_dw_loc_list *cache;
20011
20012 if (early_dwarf)
20013 return false;
20014
20015 if (TREE_CODE (decl) == ERROR_MARK)
20016 return false;
20017
20018 if (get_AT (die, DW_AT_location)
20019 || get_AT (die, DW_AT_const_value))
20020 return true;
20021
20022 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20023 || TREE_CODE (decl) == RESULT_DECL);
20024
20025 /* Try to get some constant RTL for this decl, and use that as the value of
20026 the location. */
20027
20028 rtl = rtl_for_decl_location (decl);
20029 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20030 && add_const_value_attribute (die, rtl))
20031 return true;
20032
20033 /* See if we have a single element location list that is equivalent to
20034 a constant value. In that case it is better to use add_const_value_attribute
20035 rather than expanding the constant value equivalent. */
20036 loc_list = lookup_decl_loc (decl);
20037 if (loc_list
20038 && loc_list->first
20039 && loc_list->first->next == NULL
20040 && NOTE_P (loc_list->first->loc)
20041 && NOTE_VAR_LOCATION (loc_list->first->loc)
20042 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20043 {
20044 struct var_loc_node *node;
20045
20046 node = loc_list->first;
20047 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20048 if (GET_CODE (rtl) == EXPR_LIST)
20049 rtl = XEXP (rtl, 0);
20050 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20051 && add_const_value_attribute (die, rtl))
20052 return true;
20053 }
20054 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20055 list several times. See if we've already cached the contents. */
20056 list = NULL;
20057 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20058 cache_p = false;
20059 if (cache_p)
20060 {
20061 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20062 if (cache)
20063 list = cache->loc_list;
20064 }
20065 if (list == NULL)
20066 {
20067 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20068 NULL);
20069 /* It is usually worth caching this result if the decl is from
20070 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20071 if (cache_p && list && list->dw_loc_next)
20072 {
20073 cached_dw_loc_list **slot
20074 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20075 DECL_UID (decl),
20076 INSERT);
20077 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20078 cache->decl_id = DECL_UID (decl);
20079 cache->loc_list = list;
20080 *slot = cache;
20081 }
20082 }
20083 if (list)
20084 {
20085 add_AT_location_description (die, DW_AT_location, list);
20086 return true;
20087 }
20088 /* None of that worked, so it must not really have a location;
20089 try adding a constant value attribute from the DECL_INITIAL. */
20090 return tree_add_const_value_attribute_for_decl (die, decl);
20091 }
20092
20093 /* Helper function for tree_add_const_value_attribute. Natively encode
20094 initializer INIT into an array. Return true if successful. */
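/* An illustrative sketch, assuming CHAR_BIT == 8 and a 4-byte
   little-endian int: for

     static const int v[3] = { 1, 2, 258 };

   a successful call with SIZE == 12 fills ARRAY with the bytes
   01 00 00 00  02 00 00 00  02 01 00 00.  */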
20095
20096 static bool
20097 native_encode_initializer (tree init, unsigned char *array, int size)
20098 {
20099 tree type;
20100
20101 if (init == NULL_TREE)
20102 return false;
20103
20104 STRIP_NOPS (init);
20105 switch (TREE_CODE (init))
20106 {
20107 case STRING_CST:
20108 type = TREE_TYPE (init);
20109 if (TREE_CODE (type) == ARRAY_TYPE)
20110 {
20111 tree enttype = TREE_TYPE (type);
20112 scalar_int_mode mode;
20113
20114 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20115 || GET_MODE_SIZE (mode) != 1)
20116 return false;
20117 if (int_size_in_bytes (type) != size)
20118 return false;
20119 if (size > TREE_STRING_LENGTH (init))
20120 {
20121 memcpy (array, TREE_STRING_POINTER (init),
20122 TREE_STRING_LENGTH (init));
20123 memset (array + TREE_STRING_LENGTH (init),
20124 '\0', size - TREE_STRING_LENGTH (init));
20125 }
20126 else
20127 memcpy (array, TREE_STRING_POINTER (init), size);
20128 return true;
20129 }
20130 return false;
20131 case CONSTRUCTOR:
20132 type = TREE_TYPE (init);
20133 if (int_size_in_bytes (type) != size)
20134 return false;
20135 if (TREE_CODE (type) == ARRAY_TYPE)
20136 {
20137 HOST_WIDE_INT min_index;
20138 unsigned HOST_WIDE_INT cnt;
20139 int curpos = 0, fieldsize;
20140 constructor_elt *ce;
20141
20142 if (TYPE_DOMAIN (type) == NULL_TREE
20143 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20144 return false;
20145
20146 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20147 if (fieldsize <= 0)
20148 return false;
20149
20150 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20151 memset (array, '\0', size);
20152 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20153 {
20154 tree val = ce->value;
20155 tree index = ce->index;
20156 int pos = curpos;
20157 if (index && TREE_CODE (index) == RANGE_EXPR)
20158 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20159 * fieldsize;
20160 else if (index)
20161 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20162
20163 if (val)
20164 {
20165 STRIP_NOPS (val);
20166 if (!native_encode_initializer (val, array + pos, fieldsize))
20167 return false;
20168 }
20169 curpos = pos + fieldsize;
20170 if (index && TREE_CODE (index) == RANGE_EXPR)
20171 {
20172 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20173 - tree_to_shwi (TREE_OPERAND (index, 0));
20174 while (count-- > 0)
20175 {
20176 if (val)
20177 memcpy (array + curpos, array + pos, fieldsize);
20178 curpos += fieldsize;
20179 }
20180 }
20181 gcc_assert (curpos <= size);
20182 }
20183 return true;
20184 }
20185 else if (TREE_CODE (type) == RECORD_TYPE
20186 || TREE_CODE (type) == UNION_TYPE)
20187 {
20188 tree field = NULL_TREE;
20189 unsigned HOST_WIDE_INT cnt;
20190 constructor_elt *ce;
20191
20192 if (int_size_in_bytes (type) != size)
20193 return false;
20194
20195 if (TREE_CODE (type) == RECORD_TYPE)
20196 field = TYPE_FIELDS (type);
20197
20198 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20199 {
20200 tree val = ce->value;
20201 int pos, fieldsize;
20202
20203 if (ce->index != 0)
20204 field = ce->index;
20205
20206 if (val)
20207 STRIP_NOPS (val);
20208
20209 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20210 return false;
20211
20212 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20213 && TYPE_DOMAIN (TREE_TYPE (field))
20214 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20215 return false;
20216 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20217 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20218 return false;
20219 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20220 pos = int_byte_position (field);
20221 gcc_assert (pos + fieldsize <= size);
20222 if (val && fieldsize != 0
20223 && !native_encode_initializer (val, array + pos, fieldsize))
20224 return false;
20225 }
20226 return true;
20227 }
20228 return false;
20229 case VIEW_CONVERT_EXPR:
20230 case NON_LVALUE_EXPR:
20231 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20232 default:
20233 return native_encode_expr (init, array, size) == size;
20234 }
20235 }
20236
20237 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20238 attribute is the const value T. */
20239
20240 static bool
20241 tree_add_const_value_attribute (dw_die_ref die, tree t)
20242 {
20243 tree init;
20244 tree type = TREE_TYPE (t);
20245 rtx rtl;
20246
20247 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20248 return false;
20249
20250 init = t;
20251 gcc_assert (!DECL_P (init));
20252
20253 if (TREE_CODE (init) == INTEGER_CST)
20254 {
20255 if (tree_fits_uhwi_p (init))
20256 {
20257 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20258 return true;
20259 }
20260 if (tree_fits_shwi_p (init))
20261 {
20262 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20263 return true;
20264 }
20265 }
20266 if (! early_dwarf)
20267 {
20268 rtl = rtl_for_decl_init (init, type);
20269 if (rtl)
20270 return add_const_value_attribute (die, rtl);
20271 }
20272 /* If the host and target are sane, try harder. */
20273 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20274 && initializer_constant_valid_p (init, type))
20275 {
20276 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20277 if (size > 0 && (int) size == size)
20278 {
20279 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20280
20281 if (native_encode_initializer (init, array, size))
20282 {
20283 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20284 return true;
20285 }
20286 ggc_free (array);
20287 }
20288 }
20289 return false;
20290 }
20291
20292 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20293 attribute is the const value of T, where T is an integral constant
20294 variable with static storage duration
20295 (so it can't be a PARM_DECL or a RESULT_DECL). */
20296
20297 static bool
20298 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20299 {
20300
20301 if (!decl
20302 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20303 || (VAR_P (decl) && !TREE_STATIC (decl)))
20304 return false;
20305
20306 if (TREE_READONLY (decl)
20307 && ! TREE_THIS_VOLATILE (decl)
20308 && DECL_INITIAL (decl))
20309 /* OK */;
20310 else
20311 return false;
20312
20313 /* Don't add DW_AT_const_value if abstract origin already has one. */
20314 if (get_AT (var_die, DW_AT_const_value))
20315 return false;
20316
20317 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20318 }
20319
20320 /* Convert the CFI instructions for the current function into a
20321 location list. This is used for DW_AT_frame_base when we are targeting
20322 a dwarf2 consumer that does not support the dwarf3
20323 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20324 expressions. */
20325
20326 static dw_loc_list_ref
20327 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20328 {
20329 int ix;
20330 dw_fde_ref fde;
20331 dw_loc_list_ref list, *list_tail;
20332 dw_cfi_ref cfi;
20333 dw_cfa_location last_cfa, next_cfa;
20334 const char *start_label, *last_label, *section;
20335 dw_cfa_location remember;
20336
20337 fde = cfun->fde;
20338 gcc_assert (fde != NULL);
20339
20340 section = secname_for_decl (current_function_decl);
20341 list_tail = &list;
20342 list = NULL;
20343
20344 memset (&next_cfa, 0, sizeof (next_cfa));
20345 next_cfa.reg = INVALID_REGNUM;
20346 remember = next_cfa;
20347
20348 start_label = fde->dw_fde_begin;
20349
20350 /* ??? Bald assumption that the CIE opcode list does not contain
20351 advance opcodes. */
20352 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20353 lookup_cfa_1 (cfi, &next_cfa, &remember);
20354
20355 last_cfa = next_cfa;
20356 last_label = start_label;
20357
20358 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20359 {
20360 /* If the first partition contained no CFI adjustments, the
20361 CIE opcodes apply to the whole first partition. */
20362 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20363 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20364 list_tail = &(*list_tail)->dw_loc_next;
20365 start_label = last_label = fde->dw_fde_second_begin;
20366 }
20367
20368 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20369 {
20370 switch (cfi->dw_cfi_opc)
20371 {
20372 case DW_CFA_set_loc:
20373 case DW_CFA_advance_loc1:
20374 case DW_CFA_advance_loc2:
20375 case DW_CFA_advance_loc4:
20376 if (!cfa_equal_p (&last_cfa, &next_cfa))
20377 {
20378 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20379 start_label, 0, last_label, 0, section);
20380
20381 list_tail = &(*list_tail)->dw_loc_next;
20382 last_cfa = next_cfa;
20383 start_label = last_label;
20384 }
20385 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20386 break;
20387
20388 case DW_CFA_advance_loc:
20389 /* The encoding is complex enough that we should never emit this. */
20390 gcc_unreachable ();
20391
20392 default:
20393 lookup_cfa_1 (cfi, &next_cfa, &remember);
20394 break;
20395 }
20396 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20397 {
20398 if (!cfa_equal_p (&last_cfa, &next_cfa))
20399 {
20400 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20401 start_label, 0, last_label, 0, section);
20402
20403 list_tail = &(*list_tail)->dw_loc_next;
20404 last_cfa = next_cfa;
20405 start_label = last_label;
20406 }
20407 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20408 start_label, 0, fde->dw_fde_end, 0, section);
20409 list_tail = &(*list_tail)->dw_loc_next;
20410 start_label = last_label = fde->dw_fde_second_begin;
20411 }
20412 }
20413
20414 if (!cfa_equal_p (&last_cfa, &next_cfa))
20415 {
20416 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20417 start_label, 0, last_label, 0, section);
20418 list_tail = &(*list_tail)->dw_loc_next;
20419 start_label = last_label;
20420 }
20421
20422 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20423 start_label, 0,
20424 fde->dw_fde_second_begin
20425 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20426 section);
20427
20428 maybe_gen_llsym (list);
20429
20430 return list;
20431 }
20432
20433 /* Compute a displacement from the "steady-state frame pointer" to the
20434 frame base (often the same as the CFA), and store it in
20435 frame_pointer_fb_offset. OFFSET is added to the displacement
20436 before the latter is negated. */
20437
20438 static void
20439 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20440 {
20441 rtx reg, elim;
20442
20443 #ifdef FRAME_POINTER_CFA_OFFSET
20444 reg = frame_pointer_rtx;
20445 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20446 #else
20447 reg = arg_pointer_rtx;
20448 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20449 #endif
20450
20451 elim = (ira_use_lra_p
20452 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20453 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20454 elim = strip_offset_and_add (elim, &offset);
20455
20456 frame_pointer_fb_offset = -offset;
20457
20458 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20459 in which to eliminate. This is because its stack pointer isn't
20460 directly accessible as a register within the ISA. To work around
20461 this, assume that while we cannot provide a proper value for
20462 frame_pointer_fb_offset, we won't need one either. */
20463 frame_pointer_fb_offset_valid
20464 = ((SUPPORTS_STACK_ALIGNMENT
20465 && (elim == hard_frame_pointer_rtx
20466 || elim == stack_pointer_rtx))
20467 || elim == (frame_pointer_needed
20468 ? hard_frame_pointer_rtx
20469 : stack_pointer_rtx));
20470 }
20471
20472 /* Generate a DW_AT_name attribute given some string value to be included as
20473 the value of the attribute. */
20474
20475 static void
20476 add_name_attribute (dw_die_ref die, const char *name_string)
20477 {
20478 if (name_string != NULL && *name_string != 0)
20479 {
20480 if (demangle_name_func)
20481 name_string = (*demangle_name_func) (name_string);
20482
20483 add_AT_string (die, DW_AT_name, name_string);
20484 }
20485 }
20486
20487 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20488 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20489 of TYPE accordingly.
20490
20491 ??? This is a temporary measure until after we're able to generate
20492 regular DWARF for the complex Ada type system. */
20493
20494 static void
20495 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20496 dw_die_ref context_die)
20497 {
20498 tree dtype;
20499 dw_die_ref dtype_die;
20500
20501 if (!lang_hooks.types.descriptive_type)
20502 return;
20503
20504 dtype = lang_hooks.types.descriptive_type (type);
20505 if (!dtype)
20506 return;
20507
20508 dtype_die = lookup_type_die (dtype);
20509 if (!dtype_die)
20510 {
20511 gen_type_die (dtype, context_die);
20512 dtype_die = lookup_type_die (dtype);
20513 gcc_assert (dtype_die);
20514 }
20515
20516 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20517 }
20518
20519 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20520
20521 static const char *
20522 comp_dir_string (void)
20523 {
20524 const char *wd;
20525 char *wd1;
20526 static const char *cached_wd = NULL;
20527
20528 if (cached_wd != NULL)
20529 return cached_wd;
20530
20531 wd = get_src_pwd ();
20532 if (wd == NULL)
20533 return NULL;
20534
20535 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20536 {
20537 int wdlen;
20538
20539 wdlen = strlen (wd);
20540 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20541 strcpy (wd1, wd);
20542 wd1 [wdlen] = DIR_SEPARATOR;
20543 wd1 [wdlen + 1] = 0;
20544 wd = wd1;
20545 }
20546
20547 cached_wd = remap_debug_filename (wd);
20548 return cached_wd;
20549 }
20550
20551 /* Generate a DW_AT_comp_dir attribute for DIE. */
20552
20553 static void
20554 add_comp_dir_attribute (dw_die_ref die)
20555 {
20556 const char * wd = comp_dir_string ();
20557 if (wd != NULL)
20558 add_AT_string (die, DW_AT_comp_dir, wd);
20559 }
20560
20561 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20562 pointer computation, ...), output a representation for that bound according
20563 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20564 loc_list_from_tree for the meaning of CONTEXT. */
20565
20566 static void
20567 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20568 int forms, struct loc_descr_context *context)
20569 {
20570 dw_die_ref context_die, decl_die;
20571 dw_loc_list_ref list;
20572 bool strip_conversions = true;
20573 bool placeholder_seen = false;
20574
20575 while (strip_conversions)
20576 switch (TREE_CODE (value))
20577 {
20578 case ERROR_MARK:
20579 case SAVE_EXPR:
20580 return;
20581
20582 CASE_CONVERT:
20583 case VIEW_CONVERT_EXPR:
20584 value = TREE_OPERAND (value, 0);
20585 break;
20586
20587 default:
20588 strip_conversions = false;
20589 break;
20590 }
20591
20592 /* If possible and permitted, output the attribute as a constant. */
20593 if ((forms & dw_scalar_form_constant) != 0
20594 && TREE_CODE (value) == INTEGER_CST)
20595 {
20596 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20597
20598 /* If HOST_WIDE_INT is big enough then represent the bound as
20599 a constant value. We need to choose a form based on
20600 whether the type is signed or unsigned. We cannot just
20601 call add_AT_unsigned if the value itself is positive
20602 (add_AT_unsigned might add the unsigned value encoded as
20603 DW_FORM_data[1248]). Some DWARF consumers will lookup the
20604 bounds type and then sign extend any unsigned values found
20605 for signed types. This is needed only for
20606 DW_AT_{lower,upper}_bound, since for most other attributes,
20607 consumers will treat DW_FORM_data[1248] as unsigned values,
20608 regardless of the underlying type. */
20609 if (prec <= HOST_BITS_PER_WIDE_INT
20610 || tree_fits_uhwi_p (value))
20611 {
20612 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20613 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20614 else
20615 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20616 }
20617 else
20618 /* Otherwise represent the bound as an unsigned value with
20619 the precision of its type. The precision and signedness
20620 of the type will be necessary to re-interpret it
20621 unambiguously. */
20622 add_AT_wide (die, attr, wi::to_wide (value));
20623 return;
20624 }
20625
20626 /* Otherwise, if it's possible and permitted too, output a reference to
20627 another DIE. */
20628 if ((forms & dw_scalar_form_reference) != 0)
20629 {
20630 tree decl = NULL_TREE;
20631
20632 /* Some type attributes reference an outer type. For instance, the upper
20633 bound of an array may reference an embedding record (this happens in
20634 Ada). */
20635 if (TREE_CODE (value) == COMPONENT_REF
20636 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20637 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20638 decl = TREE_OPERAND (value, 1);
20639
20640 else if (VAR_P (value)
20641 || TREE_CODE (value) == PARM_DECL
20642 || TREE_CODE (value) == RESULT_DECL)
20643 decl = value;
20644
20645 if (decl != NULL_TREE)
20646 {
20647 dw_die_ref decl_die = lookup_decl_die (decl);
20648
20649 /* ??? Can this happen, or should the variable have been bound
20650 first? Probably it can, since I imagine that we try to create
20651 the types of parameters in the order in which they exist in
20652 the list, and won't have created a forward reference to a
20653 later parameter. */
20654 if (decl_die != NULL)
20655 {
20656 add_AT_die_ref (die, attr, decl_die);
20657 return;
20658 }
20659 }
20660 }
20661
20662 /* Last chance: try to create a stack operation procedure to evaluate the
20663 value. Do nothing if even that is not possible or permitted. */
20664 if ((forms & dw_scalar_form_exprloc) == 0)
20665 return;
20666
20667 list = loc_list_from_tree (value, 2, context);
20668 if (context && context->placeholder_arg)
20669 {
20670 placeholder_seen = context->placeholder_seen;
20671 context->placeholder_seen = false;
20672 }
20673 if (list == NULL || single_element_loc_list_p (list))
20674 {
20675 /* If this attribute is neither a reference nor a constant, it is
20676 a DWARF expression rather than a location description. For that,
20677 loc_list_from_tree (value, 0, &context) is needed. */
20678 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20679 if (list2 && single_element_loc_list_p (list2))
20680 {
20681 if (placeholder_seen)
20682 {
20683 struct dwarf_procedure_info dpi;
20684 dpi.fndecl = NULL_TREE;
20685 dpi.args_count = 1;
20686 if (!resolve_args_picking (list2->expr, 1, &dpi))
20687 return;
20688 }
20689 add_AT_loc (die, attr, list2->expr);
20690 return;
20691 }
20692 }
20693
20694 /* If that failed to give a single element location list, fall back to
20695 outputting this as a reference... still if permitted. */
20696 if (list == NULL
20697 || (forms & dw_scalar_form_reference) == 0
20698 || placeholder_seen)
20699 return;
20700
20701 if (current_function_decl == 0)
20702 context_die = comp_unit_die ();
20703 else
20704 context_die = lookup_decl_die (current_function_decl);
20705
20706 decl_die = new_die (DW_TAG_variable, context_die, value);
20707 add_AT_flag (decl_die, DW_AT_artificial, 1);
20708 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20709 context_die);
20710 add_AT_location_description (decl_die, DW_AT_location, list);
20711 add_AT_die_ref (die, attr, decl_die);
20712 }
20713
20714 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20715 default. */
20716
20717 static int
20718 lower_bound_default (void)
20719 {
20720 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20721 {
20722 case DW_LANG_C:
20723 case DW_LANG_C89:
20724 case DW_LANG_C99:
20725 case DW_LANG_C11:
20726 case DW_LANG_C_plus_plus:
20727 case DW_LANG_C_plus_plus_11:
20728 case DW_LANG_C_plus_plus_14:
20729 case DW_LANG_ObjC:
20730 case DW_LANG_ObjC_plus_plus:
20731 return 0;
20732 case DW_LANG_Fortran77:
20733 case DW_LANG_Fortran90:
20734 case DW_LANG_Fortran95:
20735 case DW_LANG_Fortran03:
20736 case DW_LANG_Fortran08:
20737 return 1;
20738 case DW_LANG_UPC:
20739 case DW_LANG_D:
20740 case DW_LANG_Python:
20741 return dwarf_version >= 4 ? 0 : -1;
20742 case DW_LANG_Ada95:
20743 case DW_LANG_Ada83:
20744 case DW_LANG_Cobol74:
20745 case DW_LANG_Cobol85:
20746 case DW_LANG_Modula2:
20747 case DW_LANG_PLI:
20748 return dwarf_version >= 4 ? 1 : -1;
20749 default:
20750 return -1;
20751 }
20752 }
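/* For example, in a C99 compilation unit the default above is 0, so
   add_bound_info below can simply omit DW_AT_lower_bound for an array
   like "int a[4]", whereas a Fortran unit keeps 1 as the implied lower
   bound.  */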
20753
20754 /* Given a tree node describing an array bound (either lower or upper) output
20755 a representation for that bound. */
20756
20757 static void
20758 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20759 tree bound, struct loc_descr_context *context)
20760 {
20761 int dflt;
20762
20763 while (1)
20764 switch (TREE_CODE (bound))
20765 {
20766 /* Strip all conversions. */
20767 CASE_CONVERT:
20768 case VIEW_CONVERT_EXPR:
20769 bound = TREE_OPERAND (bound, 0);
20770 break;
20771
20772 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20773 are even omitted when they are the default. */
20774 case INTEGER_CST:
20775 /* If the value for this bound is the default one, we can even omit the
20776 attribute. */
20777 if (bound_attr == DW_AT_lower_bound
20778 && tree_fits_shwi_p (bound)
20779 && (dflt = lower_bound_default ()) != -1
20780 && tree_to_shwi (bound) == dflt)
20781 return;
20782
20783 /* FALLTHRU */
20784
20785 default:
20786 /* Because of the complex interactions that can arise with other GNAT
20787 encodings, GDB isn't ready yet to handle a proper DWARF description
20788 of self-referential subrange bounds: let GNAT encodings do the
20789 magic in such a case. */
20790 if (is_ada ()
20791 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20792 && contains_placeholder_p (bound))
20793 return;
20794
20795 add_scalar_info (subrange_die, bound_attr, bound,
20796 dw_scalar_form_constant
20797 | dw_scalar_form_exprloc
20798 | dw_scalar_form_reference,
20799 context);
20800 return;
20801 }
20802 }
20803
20804 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20805 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20806 Note that the block of subscript information for an array type also
20807 includes information about the element type of the given array type.
20808
20809 This function reuses previously set type and bound information if
20810 available. */
20811
20812 static void
20813 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20814 {
20815 unsigned dimension_number;
20816 tree lower, upper;
20817 dw_die_ref child = type_die->die_child;
20818
20819 for (dimension_number = 0;
20820 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20821 type = TREE_TYPE (type), dimension_number++)
20822 {
20823 tree domain = TYPE_DOMAIN (type);
20824
20825 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20826 break;
20827
20828 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20829 and (in GNU C only) variable bounds. Handle all three forms
20830 here. */
20831
20832 /* Find and reuse a previously generated DW_TAG_subrange_type if
20833 available.
20834
20835 For multi-dimensional arrays, as we iterate through the
20836 various dimensions in the enclosing for loop above, we also
20837 iterate through the DIE children and pick at each
20838 DW_TAG_subrange_type previously generated (if available).
20839 Each child DW_TAG_subrange_type DIE describes the range of
20840 the current dimension. At this point we should have as many
20841 DW_TAG_subrange_type's as we have dimensions in the
20842 array. */
20843 dw_die_ref subrange_die = NULL;
20844 if (child)
20845 while (1)
20846 {
20847 child = child->die_sib;
20848 if (child->die_tag == DW_TAG_subrange_type)
20849 subrange_die = child;
20850 if (child == type_die->die_child)
20851 {
20852 /* If we wrapped around, stop looking next time. */
20853 child = NULL;
20854 break;
20855 }
20856 if (child->die_tag == DW_TAG_subrange_type)
20857 break;
20858 }
20859 if (!subrange_die)
20860 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20861
20862 if (domain)
20863 {
20864 /* We have an array type with specified bounds. */
20865 lower = TYPE_MIN_VALUE (domain);
20866 upper = TYPE_MAX_VALUE (domain);
20867
20868 /* Define the index type. */
20869 if (TREE_TYPE (domain)
20870 && !get_AT (subrange_die, DW_AT_type))
20871 {
20872 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20873 TREE_TYPE field. We can't emit debug info for this
20874 because it is an unnamed integral type. */
20875 if (TREE_CODE (domain) == INTEGER_TYPE
20876 && TYPE_NAME (domain) == NULL_TREE
20877 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20878 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20879 ;
20880 else
20881 add_type_attribute (subrange_die, TREE_TYPE (domain),
20882 TYPE_UNQUALIFIED, false, type_die);
20883 }
20884
20885 /* ??? If upper is NULL, the array has unspecified length,
20886 but it does have a lower bound. This happens with Fortran
20887 dimension arr(N:*)
20888 Since the debugger is definitely going to need to know N
20889 to produce useful results, go ahead and output the lower
20890 bound solo, and hope the debugger can cope. */
20891
20892 if (!get_AT (subrange_die, DW_AT_lower_bound))
20893 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20894 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20895 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20896 }
20897
20898 /* Otherwise we have an array type with an unspecified length. The
20899 DWARF-2 spec does not say how to handle this; let's just leave out the
20900 bounds. */
20901 }
20902 }
20903
20904 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20905
20906 static void
20907 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20908 {
20909 dw_die_ref decl_die;
20910 HOST_WIDE_INT size;
20911 dw_loc_descr_ref size_expr = NULL;
20912
20913 switch (TREE_CODE (tree_node))
20914 {
20915 case ERROR_MARK:
20916 size = 0;
20917 break;
20918 case ENUMERAL_TYPE:
20919 case RECORD_TYPE:
20920 case UNION_TYPE:
20921 case QUAL_UNION_TYPE:
20922 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20923 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20924 {
20925 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20926 return;
20927 }
20928 size_expr = type_byte_size (tree_node, &size);
20929 break;
20930 case FIELD_DECL:
20931 /* For a data member of a struct or union, the DW_AT_byte_size is
20932 generally given as the number of bytes normally allocated for an
20933 object of the *declared* type of the member itself. This is true
20934 even for bit-fields. */
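      /* E.g. for the member `f' in `struct { int f : 3; }' and a 4-byte int,
	 this yields DW_AT_byte_size 4 (the size of the declared type `int'),
	 not 1.  */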
20935 size = int_size_in_bytes (field_type (tree_node));
20936 break;
20937 default:
20938 gcc_unreachable ();
20939 }
20940
20941 /* Support for dynamically-sized objects was introduced by DWARFv3.
20942 At the moment, GDB does not handle variable byte sizes very well,
20943 though. */
20944 if ((dwarf_version >= 3 || !dwarf_strict)
20945 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20946 && size_expr != NULL)
20947 add_AT_loc (die, DW_AT_byte_size, size_expr);
20948
20949 /* Note that `size' might be -1 when we get to this point. If it is, that
20950 indicates that the byte size of the entity in question is variable and
20951 that we could not generate a DWARF expression that computes it. */
20952 if (size >= 0)
20953 add_AT_unsigned (die, DW_AT_byte_size, size);
20954 }
20955
20956 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20957 alignment. */
20958
20959 static void
20960 add_alignment_attribute (dw_die_ref die, tree tree_node)
20961 {
20962 if (dwarf_version < 5 && dwarf_strict)
20963 return;
20964
20965 unsigned align;
20966
20967 if (DECL_P (tree_node))
20968 {
20969 if (!DECL_USER_ALIGN (tree_node))
20970 return;
20971
20972 align = DECL_ALIGN_UNIT (tree_node);
20973 }
20974 else if (TYPE_P (tree_node))
20975 {
20976 if (!TYPE_USER_ALIGN (tree_node))
20977 return;
20978
20979 align = TYPE_ALIGN_UNIT (tree_node);
20980 }
20981 else
20982 gcc_unreachable ();
20983
20984 add_AT_unsigned (die, DW_AT_alignment, align);
20985 }
20986
20987 /* For a FIELD_DECL node which represents a bit-field, output an attribute
20988 which specifies the distance in bits from the highest order bit of the
20989 "containing object" for the bit-field to the highest order bit of the
20990 bit-field itself.
20991
20992 For any given bit-field, the "containing object" is a hypothetical object
20993 (of some integral or enum type) within which the given bit-field lives. The
20994 type of this hypothetical "containing object" is always the same as the
20995 declared type of the individual bit-field itself. The determination of the
20996 exact location of the "containing object" for a bit-field is rather
20997 complicated. It's handled by the `field_byte_offset' function (above).
20998
20999 CTX is required: see the comment for VLR_CONTEXT.
21000
21001 Note that it is the size (in bytes) of the hypothetical "containing object"
21002 which will be given in the DW_AT_byte_size attribute for this bit-field.
21003 (See `add_byte_size_attribute' above). */
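/* For instance (assuming a 4-byte, 32-bit int), for the first field of
   `struct { int f : 3; }' the containing object is the int allocated at byte
   offset 0.  On a little-endian target the field occupies the three
   low-order bits, so the value emitted is 32 - 3 = 29; on a big-endian
   target it occupies the three high-order bits and the value is 0.  */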
21004
21005 static inline void
21006 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21007 {
21008 HOST_WIDE_INT object_offset_in_bytes;
21009 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21010 HOST_WIDE_INT bitpos_int;
21011 HOST_WIDE_INT highest_order_object_bit_offset;
21012 HOST_WIDE_INT highest_order_field_bit_offset;
21013 HOST_WIDE_INT bit_offset;
21014
21015 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21016
21017 /* Must be a field and a bit field. */
21018 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21019
21020 /* We can't yet handle bit-fields whose offsets are variable, so if we
21021 encounter such things, just return without generating any attribute
21022 whatsoever. Likewise for variable or too large size. */
21023 if (! tree_fits_shwi_p (bit_position (decl))
21024 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21025 return;
21026
21027 bitpos_int = int_bit_position (decl);
21028
21029 /* Note that the bit offset is always the distance (in bits) from the
21030 highest-order bit of the "containing object" to the highest-order bit of
21031 the bit-field itself. Since the "high-order end" of any object or field
21032 is different on big-endian and little-endian machines, the computation
21033 below must take account of these differences. */
21034 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21035 highest_order_field_bit_offset = bitpos_int;
21036
21037 if (! BYTES_BIG_ENDIAN)
21038 {
21039 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21040 highest_order_object_bit_offset +=
21041 simple_type_size_in_bits (original_type);
21042 }
21043
21044 bit_offset
21045 = (! BYTES_BIG_ENDIAN
21046 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21047 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21048
21049 if (bit_offset < 0)
21050 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21051 else
21052 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21053 }
21054
21055 /* For a FIELD_DECL node which represents a bit field, output an attribute
21056 which specifies the length in bits of the given field. */
21057
21058 static inline void
21059 add_bit_size_attribute (dw_die_ref die, tree decl)
21060 {
21061 /* Must be a field and a bit field. */
21062 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21063 && DECL_BIT_FIELD_TYPE (decl));
21064
21065 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21066 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21067 }
21068
21069 /* If the compiled language is ANSI C, then add a 'prototyped'
21070 attribute if argument types are given for the parameters of a function. */
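/* For example, in C the declaration `int f (void)' is prototyped and its DIE
   receives DW_AT_prototyped, whereas the old-style `int f ()' is not
   (prototype_p returns false for it).  */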
21071
21072 static inline void
21073 add_prototyped_attribute (dw_die_ref die, tree func_type)
21074 {
21075 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21076 {
21077 case DW_LANG_C:
21078 case DW_LANG_C89:
21079 case DW_LANG_C99:
21080 case DW_LANG_C11:
21081 case DW_LANG_ObjC:
21082 if (prototype_p (func_type))
21083 add_AT_flag (die, DW_AT_prototyped, 1);
21084 break;
21085 default:
21086 break;
21087 }
21088 }
21089
21090 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21091 by looking in the type declaration, the object declaration equate table or
21092 the block mapping. */
21093
21094 static inline dw_die_ref
21095 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21096 {
21097 dw_die_ref origin_die = NULL;
21098
21099 if (DECL_P (origin))
21100 {
21101 dw_die_ref c;
21102 origin_die = lookup_decl_die (origin);
21103 /* "Unwrap" the decl's DIE which we put in the imported unit context.
21104 We are looking for the abstract copy here. */
21105 if (in_lto_p
21106 && origin_die
21107 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21108 /* ??? Identify this better. */
21109 && c->with_offset)
21110 origin_die = c;
21111 }
21112 else if (TYPE_P (origin))
21113 origin_die = lookup_type_die (origin);
21114 else if (TREE_CODE (origin) == BLOCK)
21115 origin_die = BLOCK_DIE (origin);
21116
21117 /* XXX: Functions that are never lowered don't always have correct block
21118 trees (in the case of Java and some other languages, they simply have no
21119 block tree). For these functions, there is nothing we can really do to
21120 output correct debug info for inlined functions in all cases. Rather
21121 than die, we'll just produce deficient debug info now, in that we will
21122 have variables without a proper abstract origin. In the future, when all
21123 functions are lowered, we should re-add a gcc_assert (origin_die)
21124 here. */
21125
21126 if (origin_die)
21127 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21128 return origin_die;
21129 }
21130
21131 /* We do not currently support the pure_virtual attribute. */
21132
21133 static inline void
21134 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21135 {
21136 if (DECL_VINDEX (func_decl))
21137 {
21138 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21139
21140 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21141 add_AT_loc (die, DW_AT_vtable_elem_location,
21142 new_loc_descr (DW_OP_constu,
21143 tree_to_shwi (DECL_VINDEX (func_decl)),
21144 0));
21145
21146 /* GNU extension: Record what type this method came from originally. */
21147 if (debug_info_level > DINFO_LEVEL_TERSE
21148 && DECL_CONTEXT (func_decl))
21149 add_AT_die_ref (die, DW_AT_containing_type,
21150 lookup_type_die (DECL_CONTEXT (func_decl)));
21151 }
21152 }
21153 \f
21154 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21155 given decl. This used to be a vendor extension until after DWARF 4
21156 standardized it. */
21157
21158 static void
21159 add_linkage_attr (dw_die_ref die, tree decl)
21160 {
21161 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21162
21163 /* Mimic what assemble_name_raw does with a leading '*'. */
21164 if (name[0] == '*')
21165 name = &name[1];
21166
21167 if (dwarf_version >= 4)
21168 add_AT_string (die, DW_AT_linkage_name, name);
21169 else
21170 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21171 }
21172
21173 /* Add source coordinate attributes for the given decl. */
21174
21175 static void
21176 add_src_coords_attributes (dw_die_ref die, tree decl)
21177 {
21178 expanded_location s;
21179
21180 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21181 return;
21182 s = expand_location (DECL_SOURCE_LOCATION (decl));
21183 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21184 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21185 if (debug_column_info && s.column)
21186 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21187 }
21188
21189 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21190
21191 static void
21192 add_linkage_name_raw (dw_die_ref die, tree decl)
21193 {
21194 /* Defer until we have an assembler name set. */
21195 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21196 {
21197 limbo_die_node *asm_name;
21198
21199 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21200 asm_name->die = die;
21201 asm_name->created_for = decl;
21202 asm_name->next = deferred_asm_name;
21203 deferred_asm_name = asm_name;
21204 }
21205 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21206 add_linkage_attr (die, decl);
21207 }
21208
21209 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21210
21211 static void
21212 add_linkage_name (dw_die_ref die, tree decl)
21213 {
21214 if (debug_info_level > DINFO_LEVEL_NONE
21215 && VAR_OR_FUNCTION_DECL_P (decl)
21216 && TREE_PUBLIC (decl)
21217 && !(VAR_P (decl) && DECL_REGISTER (decl))
21218 && die->die_tag != DW_TAG_member)
21219 add_linkage_name_raw (die, decl);
21220 }
21221
21222 /* Add a DW_AT_name attribute and source coordinate attribute for the
21223 given decl, but only if it actually has a name. */
21224
21225 static void
21226 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21227 bool no_linkage_name)
21228 {
21229 tree decl_name;
21230
21231 decl_name = DECL_NAME (decl);
21232 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21233 {
21234 const char *name = dwarf2_name (decl, 0);
21235 if (name)
21236 add_name_attribute (die, name);
21237 if (! DECL_ARTIFICIAL (decl))
21238 add_src_coords_attributes (die, decl);
21239
21240 if (!no_linkage_name)
21241 add_linkage_name (die, decl);
21242 }
21243
21244 #ifdef VMS_DEBUGGING_INFO
21245 /* Get the function's name, as described by its RTL. This may be different
21246 from the DECL_NAME name used in the source file. */
21247 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21248 {
21249 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21250 XEXP (DECL_RTL (decl), 0), false);
21251 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21252 }
21253 #endif /* VMS_DEBUGGING_INFO */
21254 }
21255
21256 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21257
21258 static void
21259 add_discr_value (dw_die_ref die, dw_discr_value *value)
21260 {
21261 dw_attr_node attr;
21262
21263 attr.dw_attr = DW_AT_discr_value;
21264 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21265 attr.dw_attr_val.val_entry = NULL;
21266 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21267 if (value->pos)
21268 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21269 else
21270 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21271 add_dwarf_attr (die, &attr);
21272 }
21273
21274 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21275
21276 static void
21277 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21278 {
21279 dw_attr_node attr;
21280
21281 attr.dw_attr = DW_AT_discr_list;
21282 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21283 attr.dw_attr_val.val_entry = NULL;
21284 attr.dw_attr_val.v.val_discr_list = discr_list;
21285 add_dwarf_attr (die, &attr);
21286 }
21287
21288 static inline dw_discr_list_ref
21289 AT_discr_list (dw_attr_node *attr)
21290 {
21291 return attr->dw_attr_val.v.val_discr_list;
21292 }
21293
21294 #ifdef VMS_DEBUGGING_INFO
21295 /* Output the debug main pointer die for VMS. */
21296
21297 void
21298 dwarf2out_vms_debug_main_pointer (void)
21299 {
21300 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21301 dw_die_ref die;
21302
21303 /* Allocate the VMS debug main subprogram die. */
21304 die = new_die_raw (DW_TAG_subprogram);
21305 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21306 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21307 current_function_funcdef_no);
21308 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21309
21310 /* Make it the first child of comp_unit_die (). */
21311 die->die_parent = comp_unit_die ();
21312 if (comp_unit_die ()->die_child)
21313 {
21314 die->die_sib = comp_unit_die ()->die_child->die_sib;
21315 comp_unit_die ()->die_child->die_sib = die;
21316 }
21317 else
21318 {
21319 die->die_sib = die;
21320 comp_unit_die ()->die_child = die;
21321 }
21322 }
21323 #endif /* VMS_DEBUGGING_INFO */
21324
21325 /* Push a new declaration scope. */
21326
21327 static void
21328 push_decl_scope (tree scope)
21329 {
21330 vec_safe_push (decl_scope_table, scope);
21331 }
21332
21333 /* Pop a declaration scope. */
21334
21335 static inline void
21336 pop_decl_scope (void)
21337 {
21338 decl_scope_table->pop ();
21339 }
21340
21341 /* walk_tree helper function for uses_local_type, below. */
21342
21343 static tree
21344 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21345 {
21346 if (!TYPE_P (*tp))
21347 *walk_subtrees = 0;
21348 else
21349 {
21350 tree name = TYPE_NAME (*tp);
21351 if (name && DECL_P (name) && decl_function_context (name))
21352 return *tp;
21353 }
21354 return NULL_TREE;
21355 }
21356
21357 /* If TYPE involves a function-local type (including a local typedef to a
21358 non-local type), returns that type; otherwise returns NULL_TREE. */
21359
21360 static tree
21361 uses_local_type (tree type)
21362 {
21363 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21364 return used;
21365 }
21366
21367 /* Return the DIE for the scope that immediately contains this type.
21368 Non-named types that do not involve a function-local type get global
21369 scope. Named types nested in namespaces or other types get their
21370 containing scope. All other types (i.e. function-local named types) get
21371 the current active scope. */
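/* For example, with full debug info a struct declared inside namespace N is
   typically given N's DIE as its scope, an unnamed file-scope struct goes
   directly under the compilation unit DIE, and a named struct declared
   inside a function body stays in the currently active context DIE.  */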
21372
21373 static dw_die_ref
21374 scope_die_for (tree t, dw_die_ref context_die)
21375 {
21376 dw_die_ref scope_die = NULL;
21377 tree containing_scope;
21378
21379 /* Non-types always go in the current scope. */
21380 gcc_assert (TYPE_P (t));
21381
21382 /* Use the scope of the typedef, rather than the scope of the type
21383 it refers to. */
21384 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21385 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21386 else
21387 containing_scope = TYPE_CONTEXT (t);
21388
21389 /* Use the containing namespace if there is one. */
21390 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21391 {
21392 if (context_die == lookup_decl_die (containing_scope))
21393 /* OK */;
21394 else if (debug_info_level > DINFO_LEVEL_TERSE)
21395 context_die = get_context_die (containing_scope);
21396 else
21397 containing_scope = NULL_TREE;
21398 }
21399
21400 /* Ignore function type "scopes" from the C frontend. They mean that
21401 a tagged type is local to a parmlist of a function declarator, but
21402 that isn't useful to DWARF. */
21403 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21404 containing_scope = NULL_TREE;
21405
21406 if (SCOPE_FILE_SCOPE_P (containing_scope))
21407 {
21408 /* If T uses a local type keep it local as well, to avoid references
21409 to function-local DIEs from outside the function. */
21410 if (current_function_decl && uses_local_type (t))
21411 scope_die = context_die;
21412 else
21413 scope_die = comp_unit_die ();
21414 }
21415 else if (TYPE_P (containing_scope))
21416 {
21417 /* For types, we can just look up the appropriate DIE. */
21418 if (debug_info_level > DINFO_LEVEL_TERSE)
21419 scope_die = get_context_die (containing_scope);
21420 else
21421 {
21422 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21423 if (scope_die == NULL)
21424 scope_die = comp_unit_die ();
21425 }
21426 }
21427 else
21428 scope_die = context_die;
21429
21430 return scope_die;
21431 }
21432
21433 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21434
21435 static inline int
21436 local_scope_p (dw_die_ref context_die)
21437 {
21438 for (; context_die; context_die = context_die->die_parent)
21439 if (context_die->die_tag == DW_TAG_inlined_subroutine
21440 || context_die->die_tag == DW_TAG_subprogram)
21441 return 1;
21442
21443 return 0;
21444 }
21445
21446 /* Returns nonzero if CONTEXT_DIE is a class. */
21447
21448 static inline int
21449 class_scope_p (dw_die_ref context_die)
21450 {
21451 return (context_die
21452 && (context_die->die_tag == DW_TAG_structure_type
21453 || context_die->die_tag == DW_TAG_class_type
21454 || context_die->die_tag == DW_TAG_interface_type
21455 || context_die->die_tag == DW_TAG_union_type));
21456 }
21457
21458 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21459 whether or not to treat a DIE in this context as a declaration. */
21460
21461 static inline int
21462 class_or_namespace_scope_p (dw_die_ref context_die)
21463 {
21464 return (class_scope_p (context_die)
21465 || (context_die && context_die->die_tag == DW_TAG_namespace));
21466 }
21467
21468 /* Many forms of DIEs require a "type description" attribute. This
21469 routine locates the proper "type descriptor" die for the type given
21470 by 'type' plus any additional qualifiers given by 'cv_quals', and
21471 adds a DW_AT_type attribute below the given die. */
21472
21473 static void
21474 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21475 bool reverse, dw_die_ref context_die)
21476 {
21477 enum tree_code code = TREE_CODE (type);
21478 dw_die_ref type_die = NULL;
21479
21480 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21481 or fixed-point type, use the inner type. This is because we have no
21482 support for unnamed types in base_type_die. This can happen if this is
21483 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21484 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21485 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21486 type = TREE_TYPE (type), code = TREE_CODE (type);
21487
21488 if (code == ERROR_MARK
21489 /* Handle a special case. For functions whose return type is void, we
21490 generate *no* type attribute. (Note that no object may have type
21491 `void', so this only applies to function return types). */
21492 || code == VOID_TYPE)
21493 return;
21494
21495 type_die = modified_type_die (type,
21496 cv_quals | TYPE_QUALS (type),
21497 reverse,
21498 context_die);
21499
21500 if (type_die != NULL)
21501 add_AT_die_ref (object_die, DW_AT_type, type_die);
21502 }
21503
21504 /* Given an object die, add the calling convention attribute for the
21505 function call type. */
21506 static void
21507 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21508 {
21509 enum dwarf_calling_convention value = DW_CC_normal;
21510
21511 value = ((enum dwarf_calling_convention)
21512 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21513
21514 if (is_fortran ()
21515 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21516 {
21517 /* DWARF 2 doesn't provide a way to identify a program's source-level
21518 entry point. DW_AT_calling_convention attributes are only meant
21519 to describe functions' calling conventions. However, lacking a
21520 better way to signal the Fortran main program, we used this for
21521 a long time, following existing custom. Now, DWARF 4 has
21522 DW_AT_main_subprogram, which we add below, but some tools still
21523 rely on the old way, which we thus keep. */
21524 value = DW_CC_program;
21525
21526 if (dwarf_version >= 4 || !dwarf_strict)
21527 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21528 }
21529
21530 /* Only add the attribute if the backend requests it, and
21531 the value is not DW_CC_normal. */
21532 if (value && (value != DW_CC_normal))
21533 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21534 }
21535
21536 /* Given a tree pointer to a struct, class, union, or enum type node, return
21537 a pointer to the (string) tag name for the given type, or zero if the type
21538 was declared without a tag. */
21539
21540 static const char *
21541 type_tag (const_tree type)
21542 {
21543 const char *name = 0;
21544
21545 if (TYPE_NAME (type) != 0)
21546 {
21547 tree t = 0;
21548
21549 /* Find the IDENTIFIER_NODE for the type name. */
21550 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21551 && !TYPE_NAMELESS (type))
21552 t = TYPE_NAME (type);
21553
21554 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21555 a TYPE_DECL node, regardless of whether or not a `typedef' was
21556 involved. */
21557 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21558 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21559 {
21560 /* We want to be extra verbose. Don't call dwarf_name if
21561 DECL_NAME isn't set. The default hook for decl_printable_name
21562 doesn't like that, and in this context it's correct to return
21563 0, instead of "<anonymous>" or the like. */
21564 if (DECL_NAME (TYPE_NAME (type))
21565 && !DECL_NAMELESS (TYPE_NAME (type)))
21566 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21567 }
21568
21569 /* Now get the name as a string, or invent one. */
21570 if (!name && t != 0)
21571 name = IDENTIFIER_POINTER (t);
21572 }
21573
21574 return (name == 0 || *name == '\0') ? 0 : name;
21575 }
21576
21577 /* Return the type associated with a data member, make a special check
21578 for bit field types. */
21579
21580 static inline tree
21581 member_declared_type (const_tree member)
21582 {
21583 return (DECL_BIT_FIELD_TYPE (member)
21584 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21585 }
21586
21587 /* Get the decl's label, as described by its RTL. This may be different
21588 from the DECL_NAME name used in the source file. */
21589
21590 #if 0
21591 static const char *
21592 decl_start_label (tree decl)
21593 {
21594 rtx x;
21595 const char *fnname;
21596
21597 x = DECL_RTL (decl);
21598 gcc_assert (MEM_P (x));
21599
21600 x = XEXP (x, 0);
21601 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21602
21603 fnname = XSTR (x, 0);
21604 return fnname;
21605 }
21606 #endif
21607 \f
21608 /* For variable-length arrays that have been previously generated, but
21609 may be incomplete due to missing subscript info, fill the subscript
21610 info. Return TRUE if this is one of those cases. */
21611 static bool
21612 fill_variable_array_bounds (tree type)
21613 {
21614 if (TREE_ASM_WRITTEN (type)
21615 && TREE_CODE (type) == ARRAY_TYPE
21616 && variably_modified_type_p (type, NULL))
21617 {
21618 dw_die_ref array_die = lookup_type_die (type);
21619 if (!array_die)
21620 return false;
21621 add_subscript_info (array_die, type, !is_ada ());
21622 return true;
21623 }
21624 return false;
21625 }
21626
21627 /* These routines generate the internal representation of the DIE's for
21628 the compilation unit. Debugging information is collected by walking
21629 the declaration trees passed in from dwarf2out_decl(). */
21630
21631 static void
21632 gen_array_type_die (tree type, dw_die_ref context_die)
21633 {
21634 dw_die_ref array_die;
21635
21636 /* GNU compilers represent multidimensional array types as sequences of one
21637 dimensional array types whose element types are themselves array types.
21638 We sometimes squish that down to a single array_type DIE with multiple
21639 subscripts in the Dwarf debugging info. The draft Dwarf specification
21640 says that we are allowed to do this kind of compression in C, because
21641 there is no difference between an array of arrays and a multidimensional
21642 array. We don't do this for Ada to remain as close as possible to the
21643 actual representation, which is especially important given the language's
21644 flexibility with respect to arrays of variable size. */
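  /* For example, the C type `int a[2][3]' may be emitted as a single
     DW_TAG_array_type with two DW_TAG_subrange_type children (bounds 0..1
     and 0..2) instead of an array of arrays; for Ada the nested
     representation is kept.  */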
21645
21646 bool collapse_nested_arrays = !is_ada ();
21647
21648 if (fill_variable_array_bounds (type))
21649 return;
21650
21651 dw_die_ref scope_die = scope_die_for (type, context_die);
21652 tree element_type;
21653
21654 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21655 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21656 if (TYPE_STRING_FLAG (type)
21657 && TREE_CODE (type) == ARRAY_TYPE
21658 && is_fortran ()
21659 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21660 {
21661 HOST_WIDE_INT size;
21662
21663 array_die = new_die (DW_TAG_string_type, scope_die, type);
21664 add_name_attribute (array_die, type_tag (type));
21665 equate_type_number_to_die (type, array_die);
21666 size = int_size_in_bytes (type);
21667 if (size >= 0)
21668 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21669 /* ??? We can't annotate types late, but for LTO we may not
21670 generate a location early either (gfortran.dg/save_6.f90). */
21671 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21672 && TYPE_DOMAIN (type) != NULL_TREE
21673 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21674 {
21675 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21676 tree rszdecl = szdecl;
21677
21678 size = int_size_in_bytes (TREE_TYPE (szdecl));
21679 if (!DECL_P (szdecl))
21680 {
21681 if (TREE_CODE (szdecl) == INDIRECT_REF
21682 && DECL_P (TREE_OPERAND (szdecl, 0)))
21683 {
21684 rszdecl = TREE_OPERAND (szdecl, 0);
21685 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21686 != DWARF2_ADDR_SIZE)
21687 size = 0;
21688 }
21689 else
21690 size = 0;
21691 }
21692 if (size > 0)
21693 {
21694 dw_loc_list_ref loc
21695 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21696 NULL);
21697 if (loc)
21698 {
21699 add_AT_location_description (array_die, DW_AT_string_length,
21700 loc);
21701 if (size != DWARF2_ADDR_SIZE)
21702 add_AT_unsigned (array_die, dwarf_version >= 5
21703 ? DW_AT_string_length_byte_size
21704 : DW_AT_byte_size, size);
21705 }
21706 }
21707 }
21708 return;
21709 }
21710
21711 array_die = new_die (DW_TAG_array_type, scope_die, type);
21712 add_name_attribute (array_die, type_tag (type));
21713 equate_type_number_to_die (type, array_die);
21714
21715 if (TREE_CODE (type) == VECTOR_TYPE)
21716 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21717
21718 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21719 if (is_fortran ()
21720 && TREE_CODE (type) == ARRAY_TYPE
21721 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21722 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21723 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21724
21725 #if 0
21726 /* We default the array ordering. Debuggers will probably do the right
21727 things even if DW_AT_ordering is not present. It's not even an issue
21728 until we start to get into multidimensional arrays anyway. If a debugger
21729 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21730 then we'll have to put the DW_AT_ordering attribute back in. (But if
21731 and when we find out that we need to put these in, we will only do so
21732 for multidimensional arrays.) */
21733 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21734 #endif
21735
21736 if (TREE_CODE (type) == VECTOR_TYPE)
21737 {
21738 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21739 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21740 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21741 add_bound_info (subrange_die, DW_AT_upper_bound,
21742 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21743 }
21744 else
21745 add_subscript_info (array_die, type, collapse_nested_arrays);
21746
21747 /* Add representation of the type of the elements of this array type and
21748 emit the corresponding DIE if we haven't done it already. */
21749 element_type = TREE_TYPE (type);
21750 if (collapse_nested_arrays)
21751 while (TREE_CODE (element_type) == ARRAY_TYPE)
21752 {
21753 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21754 break;
21755 element_type = TREE_TYPE (element_type);
21756 }
21757
21758 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21759 TREE_CODE (type) == ARRAY_TYPE
21760 && TYPE_REVERSE_STORAGE_ORDER (type),
21761 context_die);
21762
21763 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21764 if (TYPE_ARTIFICIAL (type))
21765 add_AT_flag (array_die, DW_AT_artificial, 1);
21766
21767 if (get_AT (array_die, DW_AT_name))
21768 add_pubtype (type, array_die);
21769
21770 add_alignment_attribute (array_die, type);
21771 }
21772
21773 /* This routine generates a DIE for an array with a hidden descriptor; the
21774 details are filled into *info by a langhook. */
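/* The typical client is Fortran, where assumed-shape, pointer and allocatable
   arrays keep their bounds, strides and data pointer in a runtime descriptor;
   the front end describes that layout by filling in the struct
   array_descr_info passed here.  */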
21775
21776 static void
21777 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21778 dw_die_ref context_die)
21779 {
21780 const dw_die_ref scope_die = scope_die_for (type, context_die);
21781 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21782 struct loc_descr_context context = { type, info->base_decl, NULL,
21783 false, false };
21784 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21785 int dim;
21786
21787 add_name_attribute (array_die, type_tag (type));
21788 equate_type_number_to_die (type, array_die);
21789
21790 if (info->ndimensions > 1)
21791 switch (info->ordering)
21792 {
21793 case array_descr_ordering_row_major:
21794 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21795 break;
21796 case array_descr_ordering_column_major:
21797 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21798 break;
21799 default:
21800 break;
21801 }
21802
21803 if (dwarf_version >= 3 || !dwarf_strict)
21804 {
21805 if (info->data_location)
21806 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21807 dw_scalar_form_exprloc, &context);
21808 if (info->associated)
21809 add_scalar_info (array_die, DW_AT_associated, info->associated,
21810 dw_scalar_form_constant
21811 | dw_scalar_form_exprloc
21812 | dw_scalar_form_reference, &context);
21813 if (info->allocated)
21814 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21815 dw_scalar_form_constant
21816 | dw_scalar_form_exprloc
21817 | dw_scalar_form_reference, &context);
21818 if (info->stride)
21819 {
21820 const enum dwarf_attribute attr
21821 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21822 const int forms
21823 = (info->stride_in_bits)
21824 ? dw_scalar_form_constant
21825 : (dw_scalar_form_constant
21826 | dw_scalar_form_exprloc
21827 | dw_scalar_form_reference);
21828
21829 add_scalar_info (array_die, attr, info->stride, forms, &context);
21830 }
21831 }
21832 if (dwarf_version >= 5)
21833 {
21834 if (info->rank)
21835 {
21836 add_scalar_info (array_die, DW_AT_rank, info->rank,
21837 dw_scalar_form_constant
21838 | dw_scalar_form_exprloc, &context);
21839 subrange_tag = DW_TAG_generic_subrange;
21840 context.placeholder_arg = true;
21841 }
21842 }
21843
21844 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21845
21846 for (dim = 0; dim < info->ndimensions; dim++)
21847 {
21848 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21849
21850 if (info->dimen[dim].bounds_type)
21851 add_type_attribute (subrange_die,
21852 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21853 false, context_die);
21854 if (info->dimen[dim].lower_bound)
21855 add_bound_info (subrange_die, DW_AT_lower_bound,
21856 info->dimen[dim].lower_bound, &context);
21857 if (info->dimen[dim].upper_bound)
21858 add_bound_info (subrange_die, DW_AT_upper_bound,
21859 info->dimen[dim].upper_bound, &context);
21860 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21861 add_scalar_info (subrange_die, DW_AT_byte_stride,
21862 info->dimen[dim].stride,
21863 dw_scalar_form_constant
21864 | dw_scalar_form_exprloc
21865 | dw_scalar_form_reference,
21866 &context);
21867 }
21868
21869 gen_type_die (info->element_type, context_die);
21870 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21871 TREE_CODE (type) == ARRAY_TYPE
21872 && TYPE_REVERSE_STORAGE_ORDER (type),
21873 context_die);
21874
21875 if (get_AT (array_die, DW_AT_name))
21876 add_pubtype (type, array_die);
21877
21878 add_alignment_attribute (array_die, type);
21879 }
21880
21881 #if 0
21882 static void
21883 gen_entry_point_die (tree decl, dw_die_ref context_die)
21884 {
21885 tree origin = decl_ultimate_origin (decl);
21886 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21887
21888 if (origin != NULL)
21889 add_abstract_origin_attribute (decl_die, origin);
21890 else
21891 {
21892 add_name_and_src_coords_attributes (decl_die, decl);
21893 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21894 TYPE_UNQUALIFIED, false, context_die);
21895 }
21896
21897 if (DECL_ABSTRACT_P (decl))
21898 equate_decl_number_to_die (decl, decl_die);
21899 else
21900 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21901 }
21902 #endif
21903
21904 /* Walk through the list of incomplete types again, trying once more to
21905 emit full debugging info for them. */
21906
21907 static void
21908 retry_incomplete_types (void)
21909 {
21910 set_early_dwarf s;
21911 int i;
21912
21913 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21914 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21915 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21916 vec_safe_truncate (incomplete_types, 0);
21917 }
21918
21919 /* Determine what tag to use for a record type. */
21920
21921 static enum dwarf_tag
21922 record_type_tag (tree type)
21923 {
21924 if (! lang_hooks.types.classify_record)
21925 return DW_TAG_structure_type;
21926
21927 switch (lang_hooks.types.classify_record (type))
21928 {
21929 case RECORD_IS_STRUCT:
21930 return DW_TAG_structure_type;
21931
21932 case RECORD_IS_CLASS:
21933 return DW_TAG_class_type;
21934
21935 case RECORD_IS_INTERFACE:
21936 if (dwarf_version >= 3 || !dwarf_strict)
21937 return DW_TAG_interface_type;
21938 return DW_TAG_structure_type;
21939
21940 default:
21941 gcc_unreachable ();
21942 }
21943 }
21944
21945 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21946 include all of the information about the enumeration values also. Each
21947 enumerated type name/value is listed as a child of the enumerated type
21948 DIE. */
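/* For instance, `enum color { RED, GREEN = 5 };' yields a
   DW_TAG_enumeration_type DIE with two DW_TAG_enumerator children whose
   DW_AT_const_value attributes are 0 and 5 respectively.  */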
21949
21950 static dw_die_ref
21951 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21952 {
21953 dw_die_ref type_die = lookup_type_die (type);
21954 dw_die_ref orig_type_die = type_die;
21955
21956 if (type_die == NULL)
21957 {
21958 type_die = new_die (DW_TAG_enumeration_type,
21959 scope_die_for (type, context_die), type);
21960 equate_type_number_to_die (type, type_die);
21961 add_name_attribute (type_die, type_tag (type));
21962 if ((dwarf_version >= 4 || !dwarf_strict)
21963 && ENUM_IS_SCOPED (type))
21964 add_AT_flag (type_die, DW_AT_enum_class, 1);
21965 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
21966 add_AT_flag (type_die, DW_AT_declaration, 1);
21967 if (!dwarf_strict)
21968 add_AT_unsigned (type_die, DW_AT_encoding,
21969 TYPE_UNSIGNED (type)
21970 ? DW_ATE_unsigned
21971 : DW_ATE_signed);
21972 }
21973 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
21974 return type_die;
21975 else
21976 remove_AT (type_die, DW_AT_declaration);
21977
21978 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
21979 given enum type is incomplete, do not generate the DW_AT_byte_size
21980 attribute or the DW_AT_element_list attribute. */
21981 if (TYPE_SIZE (type))
21982 {
21983 tree link;
21984
21985 if (!ENUM_IS_OPAQUE (type))
21986 TREE_ASM_WRITTEN (type) = 1;
21987 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
21988 add_byte_size_attribute (type_die, type);
21989 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
21990 add_alignment_attribute (type_die, type);
21991 if ((dwarf_version >= 3 || !dwarf_strict)
21992 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
21993 {
21994 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
21995 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
21996 context_die);
21997 }
21998 if (TYPE_STUB_DECL (type) != NULL_TREE)
21999 {
22000 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22001 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22002 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22003 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22004 }
22005
22006 /* If the first reference to this type was as the return type of an
22007 inline function, then it may not have a parent. Fix this now. */
22008 if (type_die->die_parent == NULL)
22009 add_child_die (scope_die_for (type, context_die), type_die);
22010
22011 for (link = TYPE_VALUES (type);
22012 link != NULL; link = TREE_CHAIN (link))
22013 {
22014 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22015 tree value = TREE_VALUE (link);
22016
22017 gcc_assert (!ENUM_IS_OPAQUE (type));
22018 add_name_attribute (enum_die,
22019 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22020
22021 if (TREE_CODE (value) == CONST_DECL)
22022 value = DECL_INITIAL (value);
22023
22024 if (simple_type_size_in_bits (TREE_TYPE (value))
22025 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22026 {
22027 /* For constant forms created by add_AT_unsigned DWARF
22028 consumers (GDB, elfutils, etc.) always zero extend
22029 the value. Only when the actual value is negative
22030 do we need to use add_AT_int to generate a constant
22031 form that can represent negative values. */
22032 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22033 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22034 add_AT_unsigned (enum_die, DW_AT_const_value,
22035 (unsigned HOST_WIDE_INT) val);
22036 else
22037 add_AT_int (enum_die, DW_AT_const_value, val);
22038 }
22039 else
22040 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22041 that here. TODO: This should be re-worked to use correct
22042 signed/unsigned double tags for all cases. */
22043 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22044 }
22045
22046 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22047 if (TYPE_ARTIFICIAL (type)
22048 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22049 add_AT_flag (type_die, DW_AT_artificial, 1);
22050 }
22051 else
22052 add_AT_flag (type_die, DW_AT_declaration, 1);
22053
22054 add_pubtype (type, type_die);
22055
22056 return type_die;
22057 }
22058
22059 /* Generate a DIE to represent either a real live formal parameter decl or to
22060 represent just the type of some formal parameter position in some function
22061 type.
22062
22063 Note that this routine is a bit unusual because its argument may be a
22064 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22065 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22066 node. If it's the former then this function is being called to output a
22067 DIE to represent a formal parameter object (or some inlining thereof). If
22068 it's the latter, then this function is only being called to output a
22069 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22070 argument type of some subprogram type.
22071 If EMIT_NAME_P is true, name and source coordinate attributes
22072 are emitted. */
22073
22074 static dw_die_ref
22075 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22076 dw_die_ref context_die)
22077 {
22078 tree node_or_origin = node ? node : origin;
22079 tree ultimate_origin;
22080 dw_die_ref parm_die = NULL;
22081
22082 if (DECL_P (node_or_origin))
22083 {
22084 parm_die = lookup_decl_die (node);
22085
22086 /* If the contexts differ, we may not be talking about the same
22087 thing.
22088 ??? When in LTO the DIE parent is the "abstract" copy and the
22089 context_die is the specification "copy". But this whole block
22090 should eventually be no longer needed. */
22091 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22092 {
22093 if (!DECL_ABSTRACT_P (node))
22094 {
22095 /* This can happen when creating an inlined instance, in
22096 which case we need to create a new DIE that will get
22097 annotated with DW_AT_abstract_origin. */
22098 parm_die = NULL;
22099 }
22100 else
22101 gcc_unreachable ();
22102 }
22103
22104 if (parm_die && parm_die->die_parent == NULL)
22105 {
22106 /* Check that parm_die already has the right attributes that
22107 we would have added below. If any attributes are
22108 missing, fall through to add them. */
22109 if (! DECL_ABSTRACT_P (node_or_origin)
22110 && !get_AT (parm_die, DW_AT_location)
22111 && !get_AT (parm_die, DW_AT_const_value))
22112 /* We are missing location info, and are about to add it. */
22113 ;
22114 else
22115 {
22116 add_child_die (context_die, parm_die);
22117 return parm_die;
22118 }
22119 }
22120 }
22121
22122 /* If we have a previously generated DIE, use it, unless this is a
22123 concrete instance (origin != NULL), in which case we need a new
22124 DIE with a corresponding DW_AT_abstract_origin. */
22125 bool reusing_die;
22126 if (parm_die && origin == NULL)
22127 reusing_die = true;
22128 else
22129 {
22130 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22131 reusing_die = false;
22132 }
22133
22134 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22135 {
22136 case tcc_declaration:
22137 ultimate_origin = decl_ultimate_origin (node_or_origin);
22138 if (node || ultimate_origin)
22139 origin = ultimate_origin;
22140
22141 if (reusing_die)
22142 goto add_location;
22143
22144 if (origin != NULL)
22145 add_abstract_origin_attribute (parm_die, origin);
22146 else if (emit_name_p)
22147 add_name_and_src_coords_attributes (parm_die, node);
22148 if (origin == NULL
22149 || (! DECL_ABSTRACT_P (node_or_origin)
22150 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22151 decl_function_context
22152 (node_or_origin))))
22153 {
22154 tree type = TREE_TYPE (node_or_origin);
22155 if (decl_by_reference_p (node_or_origin))
22156 add_type_attribute (parm_die, TREE_TYPE (type),
22157 TYPE_UNQUALIFIED,
22158 false, context_die);
22159 else
22160 add_type_attribute (parm_die, type,
22161 decl_quals (node_or_origin),
22162 false, context_die);
22163 }
22164 if (origin == NULL && DECL_ARTIFICIAL (node))
22165 add_AT_flag (parm_die, DW_AT_artificial, 1);
22166 add_location:
22167 if (node && node != origin)
22168 equate_decl_number_to_die (node, parm_die);
22169 if (! DECL_ABSTRACT_P (node_or_origin))
22170 add_location_or_const_value_attribute (parm_die, node_or_origin,
22171 node == NULL);
22172
22173 break;
22174
22175 case tcc_type:
22176 /* We were called with some kind of a ..._TYPE node. */
22177 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22178 context_die);
22179 break;
22180
22181 default:
22182 gcc_unreachable ();
22183 }
22184
22185 return parm_die;
22186 }
22187
22188 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22189 children DW_TAG_formal_parameter DIEs representing the arguments of the
22190 parameter pack.
22191
22192 PARM_PACK must be a function parameter pack.
22193 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22194 must point to the subsequent arguments of the function PACK_ARG belongs to.
22195 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22196 If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument
22197 following the last one for which a DIE was generated. */
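/* For instance, for a C++ variadic template such as
     template <typename... T> void f (T... args);
   an instantiation like f<int, char> gets one DW_TAG_GNU_formal_parameter_pack
   DIE for ARGS with two nameless DW_TAG_formal_parameter children, one per
   expanded argument.  */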
22198
22199 static dw_die_ref
22200 gen_formal_parameter_pack_die (tree parm_pack,
22201 tree pack_arg,
22202 dw_die_ref subr_die,
22203 tree *next_arg)
22204 {
22205 tree arg;
22206 dw_die_ref parm_pack_die;
22207
22208 gcc_assert (parm_pack
22209 && lang_hooks.function_parameter_pack_p (parm_pack)
22210 && subr_die);
22211
22212 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22213 add_src_coords_attributes (parm_pack_die, parm_pack);
22214
22215 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22216 {
22217 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22218 parm_pack))
22219 break;
22220 gen_formal_parameter_die (arg, NULL,
22221 false /* Don't emit name attribute. */,
22222 parm_pack_die);
22223 }
22224 if (next_arg)
22225 *next_arg = arg;
22226 return parm_pack_die;
22227 }
22228
22229 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22230 at the end of an (ANSI prototyped) formal parameters list. */
22231
22232 static void
22233 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22234 {
22235 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22236 }
22237
22238 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22239 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22240 parameters as specified in some function type specification (except for
22241 those which appear as part of a function *definition*). */
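/* For example, for a function type such as `int (int, char, ...)' this emits
   two nameless DW_TAG_formal_parameter DIEs (for the int and char positions)
   followed by a DW_TAG_unspecified_parameters DIE for the trailing
   ellipsis.  */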
22242
22243 static void
22244 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22245 {
22246 tree link;
22247 tree formal_type = NULL;
22248 tree first_parm_type;
22249 tree arg;
22250
22251 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22252 {
22253 arg = DECL_ARGUMENTS (function_or_method_type);
22254 function_or_method_type = TREE_TYPE (function_or_method_type);
22255 }
22256 else
22257 arg = NULL_TREE;
22258
22259 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22260
22261 /* Make our first pass over the list of formal parameter types and output a
22262 DW_TAG_formal_parameter DIE for each one. */
22263 for (link = first_parm_type; link; )
22264 {
22265 dw_die_ref parm_die;
22266
22267 formal_type = TREE_VALUE (link);
22268 if (formal_type == void_type_node)
22269 break;
22270
22271 /* Output a (nameless) DIE to represent the formal parameter itself. */
22272 if (!POINTER_BOUNDS_TYPE_P (formal_type))
22273 {
22274 parm_die = gen_formal_parameter_die (formal_type, NULL,
22275 true /* Emit name attribute. */,
22276 context_die);
22277 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22278 && link == first_parm_type)
22279 {
22280 add_AT_flag (parm_die, DW_AT_artificial, 1);
22281 if (dwarf_version >= 3 || !dwarf_strict)
22282 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22283 }
22284 else if (arg && DECL_ARTIFICIAL (arg))
22285 add_AT_flag (parm_die, DW_AT_artificial, 1);
22286 }
22287
22288 link = TREE_CHAIN (link);
22289 if (arg)
22290 arg = DECL_CHAIN (arg);
22291 }
22292
22293 /* If this function type has an ellipsis, add a
22294 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22295 if (formal_type != void_type_node)
22296 gen_unspecified_parameters_die (function_or_method_type, context_die);
22297
22298 /* Make our second (and final) pass over the list of formal parameter types
22299 and output DIEs to represent those types (as necessary). */
22300 for (link = TYPE_ARG_TYPES (function_or_method_type);
22301 link && TREE_VALUE (link);
22302 link = TREE_CHAIN (link))
22303 gen_type_die (TREE_VALUE (link), context_die);
22304 }
22305
22306 /* We want to generate the DIE for TYPE so that we can generate the
22307 die for MEMBER, which has been defined; we will need to refer back
22308 to the member declaration nested within TYPE. If we're trying to
22309 generate minimal debug info for TYPE, processing TYPE won't do the
22310 trick; we need to attach the member declaration by hand. */
22311
22312 static void
22313 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22314 {
22315 gen_type_die (type, context_die);
22316
22317 /* If we're trying to avoid duplicate debug info, we may not have
22318 emitted the member decl for this function. Emit it now. */
22319 if (TYPE_STUB_DECL (type)
22320 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22321 && ! lookup_decl_die (member))
22322 {
22323 dw_die_ref type_die;
22324 gcc_assert (!decl_ultimate_origin (member));
22325
22326 push_decl_scope (type);
22327 type_die = lookup_type_die_strip_naming_typedef (type);
22328 if (TREE_CODE (member) == FUNCTION_DECL)
22329 gen_subprogram_die (member, type_die);
22330 else if (TREE_CODE (member) == FIELD_DECL)
22331 {
22332 /* Ignore the nameless fields that are used to skip bits but handle
22333 C++ anonymous unions and structs. */
22334 if (DECL_NAME (member) != NULL_TREE
22335 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22336 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22337 {
22338 struct vlr_context vlr_ctx = {
22339 DECL_CONTEXT (member), /* struct_type */
22340 NULL_TREE /* variant_part_offset */
22341 };
22342 gen_type_die (member_declared_type (member), type_die);
22343 gen_field_die (member, &vlr_ctx, type_die);
22344 }
22345 }
22346 else
22347 gen_variable_die (member, NULL_TREE, type_die);
22348
22349 pop_decl_scope ();
22350 }
22351 }
22352 \f
22353 /* Forward declare these functions, because they are mutually recursive
22354 with their set_block_* pairing functions. */
22355 static void set_decl_origin_self (tree);
22356
22357 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22358 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22359 that it points to the node itself, thus indicating that the node is its
22360 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22361 the given node is NULL, recursively descend the decl/block tree which
22362 it is the root of, and for each other ..._DECL or BLOCK node contained
22363 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22364 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22365 values to point to themselves. */
22366
22367 static void
22368 set_block_origin_self (tree stmt)
22369 {
22370 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22371 {
22372 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22373
22374 {
22375 tree local_decl;
22376
22377 for (local_decl = BLOCK_VARS (stmt);
22378 local_decl != NULL_TREE;
22379 local_decl = DECL_CHAIN (local_decl))
22380 /* Do not recurse on nested functions since the inlining status
22381 of parent and child can be different as per the DWARF spec. */
22382 if (TREE_CODE (local_decl) != FUNCTION_DECL
22383 && !DECL_EXTERNAL (local_decl))
22384 set_decl_origin_self (local_decl);
22385 }
22386
22387 {
22388 tree subblock;
22389
22390 for (subblock = BLOCK_SUBBLOCKS (stmt);
22391 subblock != NULL_TREE;
22392 subblock = BLOCK_CHAIN (subblock))
22393 set_block_origin_self (subblock); /* Recurse. */
22394 }
22395 }
22396 }
22397
22398 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22399 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22400 node so that it points to the node itself, thus indicating that the
22401 node represents its own (abstract) origin. Additionally, if the
22402 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22403 the decl/block tree of which the given node is the root, and for
22404 each other ..._DECL or BLOCK node contained therein whose
22405 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22406 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22407 point to themselves. */
22408
22409 static void
22410 set_decl_origin_self (tree decl)
22411 {
22412 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22413 {
22414 DECL_ABSTRACT_ORIGIN (decl) = decl;
22415 if (TREE_CODE (decl) == FUNCTION_DECL)
22416 {
22417 tree arg;
22418
22419 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22420 DECL_ABSTRACT_ORIGIN (arg) = arg;
22421 if (DECL_INITIAL (decl) != NULL_TREE
22422 && DECL_INITIAL (decl) != error_mark_node)
22423 set_block_origin_self (DECL_INITIAL (decl));
22424 }
22425 }
22426 }
22427 \f
22428 /* Mark the early DIE for DECL as the abstract instance. */
22429
22430 static void
22431 dwarf2out_abstract_function (tree decl)
22432 {
22433 dw_die_ref old_die;
22434
22435 /* Make sure we have the actual abstract inline, not a clone. */
22436 decl = DECL_ORIGIN (decl);
22437
22438 if (DECL_IGNORED_P (decl))
22439 return;
22440
22441 old_die = lookup_decl_die (decl);
22442 /* With early debug we always have an old DIE unless we are in LTO
22443 and the user did not compile with debug info but only linked with it. */
22444 if (in_lto_p && ! old_die)
22445 return;
22446 gcc_assert (old_die != NULL);
22447 if (get_AT (old_die, DW_AT_inline)
22448 || get_AT (old_die, DW_AT_abstract_origin))
22449 /* We've already generated the abstract instance. */
22450 return;
22451
22452 /* Go ahead and put DW_AT_inline on the DIE. */
22453 if (DECL_DECLARED_INLINE_P (decl))
22454 {
22455 if (cgraph_function_possibly_inlined_p (decl))
22456 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22457 else
22458 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22459 }
22460 else
22461 {
22462 if (cgraph_function_possibly_inlined_p (decl))
22463 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22464 else
22465 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22466 }
22467
22468 if (DECL_DECLARED_INLINE_P (decl)
22469 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22470 add_AT_flag (old_die, DW_AT_artificial, 1);
22471
22472 set_decl_origin_self (decl);
22473 }
22474
22475 /* Helper function of premark_used_types() which gets called through
22476 hash_table::traverse.
22477
22478 Marks the DIE of the given TYPE as perennial, so it never gets
22479 marked as unused by prune_unused_types. */
22480
22481 bool
22482 premark_used_types_helper (tree const &type, void *)
22483 {
22484 dw_die_ref die;
22485
22486 die = lookup_type_die (type);
22487 if (die != NULL)
22488 die->die_perennial_p = 1;
22489 return true;
22490 }
22491
22492 /* Helper function of premark_types_used_by_global_vars which gets called
22493 through hash_table::traverse.
22494
22495 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22496 marked as unused by prune_unused_types. The DIE of the type is marked
22497 only if the global variable using the type will actually be emitted. */
22498
22499 int
22500 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22501 void *)
22502 {
22503 struct types_used_by_vars_entry *entry;
22504 dw_die_ref die;
22505
22506 entry = (struct types_used_by_vars_entry *) *slot;
22507 gcc_assert (entry->type != NULL
22508 && entry->var_decl != NULL);
22509 die = lookup_type_die (entry->type);
22510 if (die)
22511 {
22512 /* Ask cgraph if the global variable really is to be emitted.
22513 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22514 varpool_node *node = varpool_node::get (entry->var_decl);
22515 if (node && node->definition)
22516 {
22517 die->die_perennial_p = 1;
22518 /* Keep the parent DIEs as well. */
22519 while ((die = die->die_parent) && die->die_perennial_p == 0)
22520 die->die_perennial_p = 1;
22521 }
22522 }
22523 return 1;
22524 }
22525
22526 /* Mark all members of used_types_hash as perennial. */
22527
22528 static void
22529 premark_used_types (struct function *fun)
22530 {
22531 if (fun && fun->used_types_hash)
22532 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22533 }
22534
22535 /* Mark all types recorded in types_used_by_vars_hash as perennial. */
22536
22537 static void
22538 premark_types_used_by_global_vars (void)
22539 {
22540 if (types_used_by_vars_hash)
22541 types_used_by_vars_hash
22542 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22543 }
22544
22545 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22546 for CA_LOC call arg loc node. */
22547
22548 static dw_die_ref
22549 gen_call_site_die (tree decl, dw_die_ref subr_die,
22550 struct call_arg_loc_node *ca_loc)
22551 {
22552 dw_die_ref stmt_die = NULL, die;
22553 tree block = ca_loc->block;
22554
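  /* Walk up the BLOCK tree from the block the call belongs to, looking
     for the innermost enclosing block that already has a DIE; if none
     is found, fall back to the subprogram DIE itself.  */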
22555 while (block
22556 && block != DECL_INITIAL (decl)
22557 && TREE_CODE (block) == BLOCK)
22558 {
22559 stmt_die = BLOCK_DIE (block);
22560 if (stmt_die)
22561 break;
22562 block = BLOCK_SUPERCONTEXT (block);
22563 }
22564 if (stmt_die == NULL)
22565 stmt_die = subr_die;
22566 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22567 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22568 if (ca_loc->tail_call_p)
22569 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22570 if (ca_loc->symbol_ref)
22571 {
22572 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22573 if (tdie)
22574 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22575 else
22576 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22577 false);
22578 }
22579 return die;
22580 }
22581
22582 /* Generate a DIE to represent a declared function (either file-scope or
22583 block-local). */
22584
22585 static void
22586 gen_subprogram_die (tree decl, dw_die_ref context_die)
22587 {
22588 tree origin = decl_ultimate_origin (decl);
22589 dw_die_ref subr_die;
22590 dw_die_ref old_die = lookup_decl_die (decl);
22591
22592 /* This function gets called multiple times for different stages of
22593 the debug process. For example, for func() in this code:
22594
22595 namespace S
22596 {
22597 void func() { ... }
22598 }
22599
22600 ...we get called 4 times. Twice in early debug and twice in
22601 late debug:
22602
22603 Early debug
22604 -----------
22605
22606 1. Once while generating func() within the namespace. This is
22607 the declaration. The declaration bit below is set, as the
22608 context is the namespace.
22609
22610 A new DIE will be generated with DW_AT_declaration set.
22611
22612 2. Once for func() itself. This is the specification. The
22613 declaration bit below is clear as the context is the CU.
22614
22615 We will use the cached DIE from (1) to create a new DIE with
22616 DW_AT_specification pointing to the declaration in (1).
22617
22618 Late debug via rest_of_handle_final()
22619 -------------------------------------
22620
22621 3. Once generating func() within the namespace. This is also the
22622 declaration, as in (1), but this time we will early exit below
22623 as we have a cached DIE and a declaration needs no additional
22624 annotations (no locations), as the source declaration line
22625 info is enough.
22626
22627 4. Once for func() itself. As in (2), this is the specification,
22628 but this time we will re-use the cached DIE, and just annotate
22629 it with the location information that should now be available.
22630
22631 For something without namespaces, but with abstract instances, we
22632 are also called multiple times:
22633
22634 class Base
22635 {
22636 public:
22637 Base (); // constructor declaration (1)
22638 };
22639
22640 Base::Base () { } // constructor specification (2)
22641
22642 Early debug
22643 -----------
22644
22645 1. Once for the Base() constructor by virtue of it being a
22646 member of the Base class. This is done via
22647 rest_of_type_compilation.
22648
22649 This is a declaration, so a new DIE will be created with
22650 DW_AT_declaration.
22651
22652 2. Once for the Base() constructor definition, but this time
22653 while generating the abstract instance of the base
22654 constructor (__base_ctor) which is being generated via early
22655 debug of reachable functions.
22656
22657 Even though we have a cached version of the declaration (1),
22658 we will create a DW_AT_specification of the declaration DIE
22659 in (1).
22660
22661 3. Once for the __base_ctor itself, but this time, we generate
22662 a DW_AT_abstract_origin version of the DW_AT_specification in
22663 (2).
22664
22665 Late debug via rest_of_handle_final
22666 -----------------------------------
22667
22668 4. One final time for the __base_ctor (which will have a cached
22669 DIE with DW_AT_abstract_origin created in (3)). This time,
22670 we will just annotate the location information now
22671 available.
22672 */
22673 int declaration = (current_function_decl != decl
22674 || class_or_namespace_scope_p (context_die));
22675
22676 /* A declaration that has been previously dumped needs no
22677 additional information. */
22678 if (old_die && declaration)
22679 return;
22680
22681 /* Now that the C++ front end lazily declares artificial member fns, we
22682 might need to retrofit the declaration into its class. */
22683 if (!declaration && !origin && !old_die
22684 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22685 && !class_or_namespace_scope_p (context_die)
22686 && debug_info_level > DINFO_LEVEL_TERSE)
22687 old_die = force_decl_die (decl);
22688
22689 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22690 if (origin != NULL)
22691 {
22692 gcc_assert (!declaration || local_scope_p (context_die));
22693
22694 /* Fixup die_parent for the abstract instance of a nested
22695 inline function. */
22696 if (old_die && old_die->die_parent == NULL)
22697 add_child_die (context_die, old_die);
22698
22699 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22700 {
22701 /* If we have a DW_AT_abstract_origin we have a working
22702 cached version. */
22703 subr_die = old_die;
22704 }
22705 else
22706 {
22707 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22708 add_abstract_origin_attribute (subr_die, origin);
22709 /* This is where the actual code for a cloned function is.
22710 Let's emit linkage name attribute for it. This helps
22711 debuggers to e.g, set breakpoints into
22712 constructors/destructors when the user asks "break
22713 K::K". */
22714 add_linkage_name (subr_die, decl);
22715 }
22716 }
22717 /* A cached copy, possibly from early dwarf generation. Reuse as
22718 much as possible. */
22719 else if (old_die)
22720 {
22721 if (!get_AT_flag (old_die, DW_AT_declaration)
22722 /* We can have a normal definition following an inline one in the
22723 case of redefinition of GNU C extern inlines.
22724 It seems reasonable to use AT_specification in this case. */
22725 && !get_AT (old_die, DW_AT_inline))
22726 {
22727 /* Detect and ignore this case, where we are trying to output
22728 something we have already output. */
22729 if (get_AT (old_die, DW_AT_low_pc)
22730 || get_AT (old_die, DW_AT_ranges))
22731 return;
22732
22733 /* If we have no location information, this must be a
22734 partially generated DIE from early dwarf generation.
22735 Fall through and generate it. */
22736 }
22737
22738 /* If the definition comes from the same place as the declaration,
22739 maybe use the old DIE. We always want the DIE for this function
22740 that has the *_pc attributes to be under comp_unit_die so the
22741 debugger can find it. We also need to do this for abstract
22742 instances of inlines, since the spec requires the out-of-line copy
22743 to have the same parent. For local class methods, this doesn't
22744 apply; we just use the old DIE. */
22745 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22746 struct dwarf_file_data * file_index = lookup_filename (s.file);
22747 if ((is_cu_die (old_die->die_parent)
22748 /* This condition fixes the inconsistency/ICE with the
22749 following Fortran test (or some derivative thereof) while
22750 building libgfortran:
22751
22752 module some_m
22753 contains
22754 logical function funky (FLAG)
22755 funky = .true.
22756 end function
22757 end module
22758 */
22759 || (old_die->die_parent
22760 && old_die->die_parent->die_tag == DW_TAG_module)
22761 || context_die == NULL)
22762 && (DECL_ARTIFICIAL (decl)
22763 /* The location attributes may be in the abstract origin
22764 which in the case of LTO might be not available to
22765 look at. */
22766 || get_AT (old_die, DW_AT_abstract_origin)
22767 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22768 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22769 == (unsigned) s.line)
22770 && (!debug_column_info
22771 || s.column == 0
22772 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22773 == (unsigned) s.column)))))
22774 {
22775 subr_die = old_die;
22776
22777 /* Clear out the declaration attribute, but leave the
22778 parameters so they can be augmented with location
22779 information later. Unless this was a declaration, in
22780 which case, wipe out the nameless parameters and recreate
22781 them further down. */
22782 if (remove_AT (subr_die, DW_AT_declaration))
22783 {
22784
22785 remove_AT (subr_die, DW_AT_object_pointer);
22786 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22787 }
22788 }
22789 /* Make a specification pointing to the previously built
22790 declaration. */
22791 else
22792 {
22793 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22794 add_AT_specification (subr_die, old_die);
22795 add_pubname (decl, subr_die);
22796 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22797 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22798 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22799 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22800 if (debug_column_info
22801 && s.column
22802 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22803 != (unsigned) s.column))
22804 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22805
22806 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22807 emit the real type on the definition die. */
22808 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22809 {
22810 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22811 if (die == auto_die || die == decltype_auto_die)
22812 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22813 TYPE_UNQUALIFIED, false, context_die);
22814 }
22815
22816 /* When we process the method declaration, we haven't seen
22817 the out-of-class defaulted definition yet, so we have to
22818 recheck now. */
22819 if ((dwarf_version >= 5 || ! dwarf_strict)
22820 && !get_AT (subr_die, DW_AT_defaulted))
22821 {
22822 int defaulted
22823 = lang_hooks.decls.decl_dwarf_attribute (decl,
22824 DW_AT_defaulted);
22825 if (defaulted != -1)
22826 {
22827 /* Other values must have been handled before. */
22828 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22829 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22830 }
22831 }
22832 }
22833 }
22834 /* Create a fresh DIE for anything else. */
22835 else
22836 {
22837 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22838
22839 if (TREE_PUBLIC (decl))
22840 add_AT_flag (subr_die, DW_AT_external, 1);
22841
22842 add_name_and_src_coords_attributes (subr_die, decl);
22843 add_pubname (decl, subr_die);
22844 if (debug_info_level > DINFO_LEVEL_TERSE)
22845 {
22846 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22847 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22848 TYPE_UNQUALIFIED, false, context_die);
22849 }
22850
22851 add_pure_or_virtual_attribute (subr_die, decl);
22852 if (DECL_ARTIFICIAL (decl))
22853 add_AT_flag (subr_die, DW_AT_artificial, 1);
22854
22855 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22856 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22857
22858 add_alignment_attribute (subr_die, decl);
22859
22860 add_accessibility_attribute (subr_die, decl);
22861 }
22862
22863 /* Unless we have an existing non-declaration DIE, equate the new
22864 DIE. */
22865 if (!old_die || is_declaration_die (old_die))
22866 equate_decl_number_to_die (decl, subr_die);
22867
22868 if (declaration)
22869 {
22870 if (!old_die || !get_AT (old_die, DW_AT_inline))
22871 {
22872 add_AT_flag (subr_die, DW_AT_declaration, 1);
22873
22874 /* If this is an explicit function declaration then generate
22875 a DW_AT_explicit attribute. */
22876 if ((dwarf_version >= 3 || !dwarf_strict)
22877 && lang_hooks.decls.decl_dwarf_attribute (decl,
22878 DW_AT_explicit) == 1)
22879 add_AT_flag (subr_die, DW_AT_explicit, 1);
22880
22881 /* If this is a C++11 deleted special function member then generate
22882 a DW_AT_deleted attribute. */
22883 if ((dwarf_version >= 5 || !dwarf_strict)
22884 && lang_hooks.decls.decl_dwarf_attribute (decl,
22885 DW_AT_deleted) == 1)
22886 add_AT_flag (subr_die, DW_AT_deleted, 1);
22887
22888 /* If this is a C++11 defaulted special function member then
22889 generate a DW_AT_defaulted attribute. */
22890 if (dwarf_version >= 5 || !dwarf_strict)
22891 {
22892 int defaulted
22893 = lang_hooks.decls.decl_dwarf_attribute (decl,
22894 DW_AT_defaulted);
22895 if (defaulted != -1)
22896 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22897 }
22898
22899 /* If this is a C++11 non-static member function with & ref-qualifier
22900 then generate a DW_AT_reference attribute. */
22901 if ((dwarf_version >= 5 || !dwarf_strict)
22902 && lang_hooks.decls.decl_dwarf_attribute (decl,
22903 DW_AT_reference) == 1)
22904 add_AT_flag (subr_die, DW_AT_reference, 1);
22905
22906 /* If this is a C++11 non-static member function with &&
22907 ref-qualifier then generate a DW_AT_reference attribute. */
22908 if ((dwarf_version >= 5 || !dwarf_strict)
22909 && lang_hooks.decls.decl_dwarf_attribute (decl,
22910 DW_AT_rvalue_reference)
22911 == 1)
22912 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22913 }
22914 }
22915 /* For non DECL_EXTERNALs, if range information is available, fill
22916 the DIE with it. */
22917 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22918 {
22919 HOST_WIDE_INT cfa_fb_offset;
22920
22921 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22922
22923 if (!crtl->has_bb_partition)
22924 {
22925 dw_fde_ref fde = fun->fde;
22926 if (fde->dw_fde_begin)
22927 {
22928 /* We have already generated the labels. */
22929 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22930 fde->dw_fde_end, false);
22931 }
22932 else
22933 {
22934 /* Create start/end labels and add the range. */
22935 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22936 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22937 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22938 current_function_funcdef_no);
22939 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22940 current_function_funcdef_no);
22941 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22942 false);
22943 }
22944
22945 #if VMS_DEBUGGING_INFO
22946 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22947 Section 2.3 Prologue and Epilogue Attributes:
22948 When a breakpoint is set on entry to a function, it is generally
22949 desirable for execution to be suspended, not on the very first
22950 instruction of the function, but rather at a point after the
22951 function's frame has been set up, after any language defined local
22952 declaration processing has been completed, and before execution of
22953 the first statement of the function begins. Debuggers generally
22954 cannot properly determine where this point is. Similarly for a
22955 breakpoint set on exit from a function. The prologue and epilogue
22956 attributes allow a compiler to communicate the location(s) to use. */
22957
22958 {
22959 if (fde->dw_fde_vms_end_prologue)
22960 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22961 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22962
22963 if (fde->dw_fde_vms_begin_epilogue)
22964 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22965 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22966 }
22967 #endif
22968
22969 }
22970 else
22971 {
22972 /* Generate pubnames entries for the split function code ranges. */
22973 dw_fde_ref fde = fun->fde;
22974
22975 if (fde->dw_fde_second_begin)
22976 {
22977 if (dwarf_version >= 3 || !dwarf_strict)
22978 {
22979 /* We should use ranges for non-contiguous code section
22980 addresses. Use the actual code range for the initial
22981 section, since the HOT/COLD labels might precede an
22982 alignment offset. */
22983 bool range_list_added = false;
22984 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
22985 fde->dw_fde_end, &range_list_added,
22986 false);
22987 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
22988 fde->dw_fde_second_end,
22989 &range_list_added, false);
22990 if (range_list_added)
22991 add_ranges (NULL);
22992 }
22993 else
22994 {
22995 /* There is no real support in DWARF 2 for this, so we make
22996 a work-around. First, emit the pubname for the segment
22997 containing the function label. Then make and emit a
22998 simplified subprogram DIE for the second segment with the
22999 name prefixed by __second_sect_of_. We use the same
23000 linkage name for the second DIE so that gdb will find both
23001 sections when given "b foo". */
23002 const char *name = NULL;
23003 tree decl_name = DECL_NAME (decl);
23004 dw_die_ref seg_die;
23005
23006 /* Do the 'primary' section. */
23007 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23008 fde->dw_fde_end, false);
23009
23010 /* Build a minimal DIE for the secondary section. */
23011 seg_die = new_die (DW_TAG_subprogram,
23012 subr_die->die_parent, decl);
23013
23014 if (TREE_PUBLIC (decl))
23015 add_AT_flag (seg_die, DW_AT_external, 1);
23016
23017 if (decl_name != NULL
23018 && IDENTIFIER_POINTER (decl_name) != NULL)
23019 {
23020 name = dwarf2_name (decl, 1);
23021 if (! DECL_ARTIFICIAL (decl))
23022 add_src_coords_attributes (seg_die, decl);
23023
23024 add_linkage_name (seg_die, decl);
23025 }
23026 gcc_assert (name != NULL);
23027 add_pure_or_virtual_attribute (seg_die, decl);
23028 if (DECL_ARTIFICIAL (decl))
23029 add_AT_flag (seg_die, DW_AT_artificial, 1);
23030
23031 name = concat ("__second_sect_of_", name, NULL);
23032 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23033 fde->dw_fde_second_end, false);
23034 add_name_attribute (seg_die, name);
23035 if (want_pubnames ())
23036 add_pubname_string (name, seg_die);
23037 }
23038 }
23039 else
23040 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23041 false);
23042 }
23043
23044 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23045
23046 /* We define the "frame base" as the function's CFA. This is more
23047 convenient for several reasons: (1) It's stable across the prologue
23048 and epilogue, which makes it better than just a frame pointer,
23049 (2) With dwarf3, there exists a one-byte encoding that allows us
23050 to reference the .debug_frame data by proxy, but failing that,
23051 (3) We can at least reuse the code inspection and interpretation
23052 code that determines the CFA position at various points in the
23053 function. */
23054 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23055 {
23056 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23057 add_AT_loc (subr_die, DW_AT_frame_base, op);
23058 }
23059 else
23060 {
23061 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23062 if (list->dw_loc_next)
23063 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23064 else
23065 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23066 }
23067
23068 /* Compute a displacement from the "steady-state frame pointer" to
23069 the CFA. The former is what all stack slots and argument slots
23070 will reference in the rtl; the latter is what we've told the
23071 debugger about. We'll need to adjust all frame_base references
23072 by this displacement. */
23073 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23074
23075 if (fun->static_chain_decl)
23076 {
23077 /* DWARF requires here a location expression that computes the
23078 address of the enclosing subprogram's frame base. The machinery
23079 in tree-nested.c is supposed to store this specific address in the
23080 last field of the FRAME record. */
23081 const tree frame_type
23082 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23083 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23084
23085 tree fb_expr
23086 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23087 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23088 fb_expr, fb_decl, NULL_TREE);
23089
23090 add_AT_location_description (subr_die, DW_AT_static_link,
23091 loc_list_from_tree (fb_expr, 0, NULL));
23092 }
23093
23094 resolve_variable_values ();
23095 }
23096
23097 /* Generate child DIEs for template parameters. */
23098 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23099 gen_generic_params_dies (decl);
23100
23101 /* Now output descriptions of the arguments for this function. This gets
23102 (unnecessarily?) complex because the DECL_ARGUMENTS list
23103 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23104 `...' at the end of the formal parameter list. In order to find out if
23105 there was a trailing ellipsis or not, we must instead look at the type
23106 associated with the FUNCTION_DECL. This will be a node of type
23107 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23108 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23109 an ellipsis at the end. */
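
   /* For instance, for 'int f (int, ...)' the argument type list does not
      end in void_type_node, for 'int f (int)' it does, and an unprototyped
      C declaration 'int f ()' has no argument type list at all.  */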
23110
23111 /* In the case where we are describing a mere function declaration, all we
23112 need to do here (and all we *can* do here) is to describe the *types* of
23113 its formal parameters. */
23114 if (debug_info_level <= DINFO_LEVEL_TERSE)
23115 ;
23116 else if (declaration)
23117 gen_formal_types_die (decl, subr_die);
23118 else
23119 {
23120 /* Generate DIEs to represent all known formal parameters. */
23121 tree parm = DECL_ARGUMENTS (decl);
23122 tree generic_decl = early_dwarf
23123 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23124 tree generic_decl_parm = generic_decl
23125 ? DECL_ARGUMENTS (generic_decl)
23126 : NULL;
23127
23128 /* Now we want to walk the list of parameters of the function and
23129 emit their relevant DIEs.
23130
23131 We consider the case of DECL being an instance of a generic function
23132 as well as it being a normal function.
23133
23134 If DECL is an instance of a generic function we walk the
23135 parameters of the generic function declaration _and_ the parameters of
23136 DECL itself. This is useful because we want to emit specific DIEs for
23137 function parameter packs and those are declared as part of the
23138 generic function declaration. In that particular case,
23139 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23140 That DIE has children DIEs representing the set of arguments
23141 of the pack. Note that the set of pack arguments can be empty.
23142 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23143 child DIEs.
23144
23145 Otherwise, we just consider the parameters of DECL. */
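
      /* For example, given

	   template <typename... Args> void foo (int n, Args... rest);

	 an instantiation of foo gets a DW_TAG_GNU_formal_parameter_pack
	 DIE for REST whose children describe the individual pack
	 arguments, while N is emitted as a plain DW_TAG_formal_parameter.  */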
23146 while (generic_decl_parm || parm)
23147 {
23148 if (generic_decl_parm
23149 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23150 gen_formal_parameter_pack_die (generic_decl_parm,
23151 parm, subr_die,
23152 &parm);
23153 else if (parm && !POINTER_BOUNDS_P (parm))
23154 {
23155 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23156
23157 if (early_dwarf
23158 && parm == DECL_ARGUMENTS (decl)
23159 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23160 && parm_die
23161 && (dwarf_version >= 3 || !dwarf_strict))
23162 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23163
23164 parm = DECL_CHAIN (parm);
23165 }
23166 else if (parm)
23167 parm = DECL_CHAIN (parm);
23168
23169 if (generic_decl_parm)
23170 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23171 }
23172
23173 /* Decide whether we need an unspecified_parameters DIE at the end.
23174 There are two cases to do this for: 1) the ANSI `...' declaration -
23175 this is detectable when the end of the arg list is not a
23176 void_type_node - and 2) an unprototyped function declaration (not a
23177 definition), which just means that we have no info about the
23178 parameters at all. */
23179 if (early_dwarf)
23180 {
23181 if (prototype_p (TREE_TYPE (decl)))
23182 {
23183 /* This is the prototyped case, check for a trailing ellipsis. */
23184 if (stdarg_p (TREE_TYPE (decl)))
23185 gen_unspecified_parameters_die (decl, subr_die);
23186 }
23187 else if (DECL_INITIAL (decl) == NULL_TREE)
23188 gen_unspecified_parameters_die (decl, subr_die);
23189 }
23190 }
23191
23192 if (subr_die != old_die)
23193 /* Add the calling convention attribute if requested. */
23194 add_calling_convention_attribute (subr_die, decl);
23195
23196 /* Output Dwarf info for all of the stuff within the body of the function
23197 (if it has one - it may be just a declaration).
23198
23199 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23200 a function. This BLOCK actually represents the outermost binding contour
23201 for the function, i.e. the contour in which the function's formal
23202 parameters and labels get declared. Curiously, it appears that the front
23203 end doesn't actually put the PARM_DECL nodes for the current function onto
23204 the BLOCK_VARS list for this outer scope; they are strung off the
23205 DECL_ARGUMENTS list for the function instead.
23206
23207 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23208 the LABEL_DECL nodes for the function however, and we output DWARF info
23209 for those in decls_for_scope. Just within the `outer_scope' there will be
23210 a BLOCK node representing the function's outermost pair of curly braces,
23211 and any blocks used for the base and member initializers of a C++
23212 constructor function. */
23213 tree outer_scope = DECL_INITIAL (decl);
23214 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23215 {
23216 int call_site_note_count = 0;
23217 int tail_call_site_note_count = 0;
23218
23219 /* Emit a DW_TAG_variable DIE for a named return value. */
23220 if (DECL_NAME (DECL_RESULT (decl)))
23221 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23222
23223 /* The first time through decls_for_scope we will generate the
23224 DIEs for the locals. The second time, we fill in the
23225 location info. */
23226 decls_for_scope (outer_scope, subr_die);
23227
23228 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23229 {
23230 struct call_arg_loc_node *ca_loc;
23231 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23232 {
23233 dw_die_ref die = NULL;
23234 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23235 rtx arg, next_arg;
23236
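	  /* The CALL_ARG_LOCATION note carries a list of (location, value)
	     pairs: the location is a REG, MEM or DEBUG_PARAMETER_REF for a
	     parameter (or pc_rtx for the call target), and the value is
	     the RTL it holds at the call.  Emit a call-site parameter DIE
	     for each pair we can describe.  */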
23237 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23238 ? XEXP (ca_loc->call_arg_loc_note, 0)
23239 : NULL_RTX);
23240 arg; arg = next_arg)
23241 {
23242 dw_loc_descr_ref reg, val;
23243 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23244 dw_die_ref cdie, tdie = NULL;
23245
23246 next_arg = XEXP (arg, 1);
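		  /* If this location is a register and the next entry is a
		     MEM based on the same register, that entry holds the
		     value stored at the pointed-to location; consume it here
		     and emit it below as DW_AT_call_data_value rather than
		     as a separate parameter.  */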
23247 if (REG_P (XEXP (XEXP (arg, 0), 0))
23248 && next_arg
23249 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23250 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23251 && REGNO (XEXP (XEXP (arg, 0), 0))
23252 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23253 next_arg = XEXP (next_arg, 1);
23254 if (mode == VOIDmode)
23255 {
23256 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23257 if (mode == VOIDmode)
23258 mode = GET_MODE (XEXP (arg, 0));
23259 }
23260 if (mode == VOIDmode || mode == BLKmode)
23261 continue;
23262 /* Get dynamic information about call target only if we
23263 have no static information: we cannot generate both
23264 DW_AT_call_origin and DW_AT_call_target
23265 attributes. */
23266 if (ca_loc->symbol_ref == NULL_RTX)
23267 {
23268 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23269 {
23270 tloc = XEXP (XEXP (arg, 0), 1);
23271 continue;
23272 }
23273 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23274 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23275 {
23276 tlocc = XEXP (XEXP (arg, 0), 1);
23277 continue;
23278 }
23279 }
23280 reg = NULL;
23281 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23282 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23283 VAR_INIT_STATUS_INITIALIZED);
23284 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23285 {
23286 rtx mem = XEXP (XEXP (arg, 0), 0);
23287 reg = mem_loc_descriptor (XEXP (mem, 0),
23288 get_address_mode (mem),
23289 GET_MODE (mem),
23290 VAR_INIT_STATUS_INITIALIZED);
23291 }
23292 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23293 == DEBUG_PARAMETER_REF)
23294 {
23295 tree tdecl
23296 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23297 tdie = lookup_decl_die (tdecl);
23298 if (tdie == NULL)
23299 continue;
23300 }
23301 else
23302 continue;
23303 if (reg == NULL
23304 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23305 != DEBUG_PARAMETER_REF)
23306 continue;
23307 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23308 VOIDmode,
23309 VAR_INIT_STATUS_INITIALIZED);
23310 if (val == NULL)
23311 continue;
23312 if (die == NULL)
23313 die = gen_call_site_die (decl, subr_die, ca_loc);
23314 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23315 NULL_TREE);
23316 if (reg != NULL)
23317 add_AT_loc (cdie, DW_AT_location, reg);
23318 else if (tdie != NULL)
23319 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23320 tdie);
23321 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23322 if (next_arg != XEXP (arg, 1))
23323 {
23324 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23325 if (mode == VOIDmode)
23326 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23327 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23328 0), 1),
23329 mode, VOIDmode,
23330 VAR_INIT_STATUS_INITIALIZED);
23331 if (val != NULL)
23332 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23333 val);
23334 }
23335 }
23336 if (die == NULL
23337 && (ca_loc->symbol_ref || tloc))
23338 die = gen_call_site_die (decl, subr_die, ca_loc);
23339 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23340 {
23341 dw_loc_descr_ref tval = NULL;
23342
23343 if (tloc != NULL_RTX)
23344 tval = mem_loc_descriptor (tloc,
23345 GET_MODE (tloc) == VOIDmode
23346 ? Pmode : GET_MODE (tloc),
23347 VOIDmode,
23348 VAR_INIT_STATUS_INITIALIZED);
23349 if (tval)
23350 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23351 else if (tlocc != NULL_RTX)
23352 {
23353 tval = mem_loc_descriptor (tlocc,
23354 GET_MODE (tlocc) == VOIDmode
23355 ? Pmode : GET_MODE (tlocc),
23356 VOIDmode,
23357 VAR_INIT_STATUS_INITIALIZED);
23358 if (tval)
23359 add_AT_loc (die,
23360 dwarf_AT (DW_AT_call_target_clobbered),
23361 tval);
23362 }
23363 }
23364 if (die != NULL)
23365 {
23366 call_site_note_count++;
23367 if (ca_loc->tail_call_p)
23368 tail_call_site_note_count++;
23369 }
23370 }
23371 }
23372 call_arg_locations = NULL;
23373 call_arg_loc_last = NULL;
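      /* If we emitted a call-site DIE for every call in the function (or
	 at least for every tail call), say so with DW_AT_call_all_calls
	 or DW_AT_call_all_tail_calls, so consumers know the call-site
	 information is complete.  */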
23374 if (tail_call_site_count >= 0
23375 && tail_call_site_count == tail_call_site_note_count
23376 && (!dwarf_strict || dwarf_version >= 5))
23377 {
23378 if (call_site_count >= 0
23379 && call_site_count == call_site_note_count)
23380 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23381 else
23382 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23383 }
23384 call_site_count = -1;
23385 tail_call_site_count = -1;
23386 }
23387
23388 /* Mark used types after we have created DIEs for the function's scopes. */
23389 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23390 }
23391
23392 /* Returns a hash value for X (which really is a die_struct). */
23393
23394 hashval_t
23395 block_die_hasher::hash (die_struct *d)
23396 {
23397 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23398 }
23399
23400 /* Return nonzero if decl_id and die_parent of die_struct X are the same
23401 as decl_id and die_parent of die_struct Y. */
23402
23403 bool
23404 block_die_hasher::equal (die_struct *x, die_struct *y)
23405 {
23406 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23407 }
23408
23409 /* Hold information about markers for inlined entry points. */
23410 struct GTY ((for_user)) inline_entry_data
23411 {
23412 /* The block that's the inlined_function_outer_scope for an inlined
23413 function. */
23414 tree block;
23415
23416 /* The label at the inlined entry point. */
23417 const char *label_pfx;
23418 unsigned int label_num;
23419
23420 /* The view number to be used as the inlined entry point. */
23421 var_loc_view view;
23422 };
23423
23424 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23425 {
23426 typedef tree compare_type;
23427 static inline hashval_t hash (const inline_entry_data *);
23428 static inline bool equal (const inline_entry_data *, const_tree);
23429 };
23430
23431 /* Hash table routines for inline_entry_data. */
23432
23433 inline hashval_t
23434 inline_entry_data_hasher::hash (const inline_entry_data *data)
23435 {
23436 return htab_hash_pointer (data->block);
23437 }
23438
23439 inline bool
23440 inline_entry_data_hasher::equal (const inline_entry_data *data,
23441 const_tree block)
23442 {
23443 return data->block == block;
23444 }
23445
23446 /* Inlined entry points pending DIE creation in this compilation unit. */
23447
23448 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23449
23450
23451 /* Return TRUE if DECL, which may have been previously generated as
23452 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23453 true if decl (or its origin) is either an extern declaration or a
23454 class/namespace scoped declaration.
23455
23456 The declare_in_namespace support causes us to get two DIEs for one
23457 variable, both of which are declarations. We want to avoid
23458 considering one to be a specification, so we must test for
23459 DECLARATION and DW_AT_declaration. */
23460 static inline bool
23461 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23462 {
23463 return (old_die && TREE_STATIC (decl) && !declaration
23464 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23465 }
23466
23467 /* Return true if DECL is a local static. */
23468
23469 static inline bool
23470 local_function_static (tree decl)
23471 {
23472 gcc_assert (VAR_P (decl));
23473 return TREE_STATIC (decl)
23474 && DECL_CONTEXT (decl)
23475 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23476 }
23477
23478 /* Generate a DIE to represent a declared data object.
23479 Either DECL or ORIGIN must be non-null. */
23480
23481 static void
23482 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23483 {
23484 HOST_WIDE_INT off = 0;
23485 tree com_decl;
23486 tree decl_or_origin = decl ? decl : origin;
23487 tree ultimate_origin;
23488 dw_die_ref var_die;
23489 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23490 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23491 || class_or_namespace_scope_p (context_die));
23492 bool specialization_p = false;
23493 bool no_linkage_name = false;
23494
23495 /* While C++ inline static data members have definitions inside the
23496 class, force the first DIE to be a declaration, then let gen_member_die
23497 reparent it to the class context and call gen_variable_die again
23498 to create the outside of the class DIE for the definition. */
23499 if (!declaration
23500 && old_die == NULL
23501 && decl
23502 && DECL_CONTEXT (decl)
23503 && TYPE_P (DECL_CONTEXT (decl))
23504 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23505 {
23506 declaration = true;
23507 if (dwarf_version < 5)
23508 no_linkage_name = true;
23509 }
23510
23511 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23512 if (decl || ultimate_origin)
23513 origin = ultimate_origin;
23514 com_decl = fortran_common (decl_or_origin, &off);
23515
23516 /* Symbol in common gets emitted as a child of the common block, in the form
23517 of a data member. */
23518 if (com_decl)
23519 {
23520 dw_die_ref com_die;
23521 dw_loc_list_ref loc = NULL;
23522 die_node com_die_arg;
23523
23524 var_die = lookup_decl_die (decl_or_origin);
23525 if (var_die)
23526 {
23527 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23528 {
23529 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23530 if (loc)
23531 {
23532 if (off)
23533 {
23534 /* Optimize the common case. */
23535 if (single_element_loc_list_p (loc)
23536 && loc->expr->dw_loc_opc == DW_OP_addr
23537 && loc->expr->dw_loc_next == NULL
23538 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23539 == SYMBOL_REF)
23540 {
23541 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23542 loc->expr->dw_loc_oprnd1.v.val_addr
23543 = plus_constant (GET_MODE (x), x , off);
23544 }
23545 else
23546 loc_list_plus_const (loc, off);
23547 }
23548 add_AT_location_description (var_die, DW_AT_location, loc);
23549 remove_AT (var_die, DW_AT_declaration);
23550 }
23551 }
23552 return;
23553 }
23554
23555 if (common_block_die_table == NULL)
23556 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23557
23558 com_die_arg.decl_id = DECL_UID (com_decl);
23559 com_die_arg.die_parent = context_die;
23560 com_die = common_block_die_table->find (&com_die_arg);
23561 if (! early_dwarf)
23562 loc = loc_list_from_tree (com_decl, 2, NULL);
23563 if (com_die == NULL)
23564 {
23565 const char *cnam
23566 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23567 die_node **slot;
23568
23569 com_die = new_die (DW_TAG_common_block, context_die, decl);
23570 add_name_and_src_coords_attributes (com_die, com_decl);
23571 if (loc)
23572 {
23573 add_AT_location_description (com_die, DW_AT_location, loc);
23574 /* Avoid sharing the same loc descriptor between
23575 DW_TAG_common_block and DW_TAG_variable. */
23576 loc = loc_list_from_tree (com_decl, 2, NULL);
23577 }
23578 else if (DECL_EXTERNAL (decl_or_origin))
23579 add_AT_flag (com_die, DW_AT_declaration, 1);
23580 if (want_pubnames ())
23581 add_pubname_string (cnam, com_die); /* ??? needed? */
23582 com_die->decl_id = DECL_UID (com_decl);
23583 slot = common_block_die_table->find_slot (com_die, INSERT);
23584 *slot = com_die;
23585 }
23586 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23587 {
23588 add_AT_location_description (com_die, DW_AT_location, loc);
23589 loc = loc_list_from_tree (com_decl, 2, NULL);
23590 remove_AT (com_die, DW_AT_declaration);
23591 }
23592 var_die = new_die (DW_TAG_variable, com_die, decl);
23593 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23594 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23595 decl_quals (decl_or_origin), false,
23596 context_die);
23597 add_alignment_attribute (var_die, decl);
23598 add_AT_flag (var_die, DW_AT_external, 1);
23599 if (loc)
23600 {
23601 if (off)
23602 {
23603 /* Optimize the common case. */
23604 if (single_element_loc_list_p (loc)
23605 && loc->expr->dw_loc_opc == DW_OP_addr
23606 && loc->expr->dw_loc_next == NULL
23607 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23608 {
23609 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23610 loc->expr->dw_loc_oprnd1.v.val_addr
23611 = plus_constant (GET_MODE (x), x, off);
23612 }
23613 else
23614 loc_list_plus_const (loc, off);
23615 }
23616 add_AT_location_description (var_die, DW_AT_location, loc);
23617 }
23618 else if (DECL_EXTERNAL (decl_or_origin))
23619 add_AT_flag (var_die, DW_AT_declaration, 1);
23620 if (decl)
23621 equate_decl_number_to_die (decl, var_die);
23622 return;
23623 }
23624
23625 if (old_die)
23626 {
23627 if (declaration)
23628 {
23629 /* A declaration that has been previously dumped needs no
23630 further annotations, since it doesn't need location info on
23631 the second pass. */
23632 return;
23633 }
23634 else if (decl_will_get_specification_p (old_die, decl, declaration)
23635 && !get_AT (old_die, DW_AT_specification))
23636 {
23637 /* Fall-thru so we can make a new variable die along with a
23638 DW_AT_specification. */
23639 }
23640 else if (origin && old_die->die_parent != context_die)
23641 {
23642 /* If we will be creating an inlined instance, we need a
23643 new DIE that will get annotated with
23644 DW_AT_abstract_origin. */
23645 gcc_assert (!DECL_ABSTRACT_P (decl));
23646 }
23647 else
23648 {
23649 /* If a DIE was dumped early, it still needs location info.
23650 Skip to where we fill the location bits. */
23651 var_die = old_die;
23652
23653 /* ??? In LTRANS we cannot annotate early created variably
23654 modified type DIEs without copying them and adjusting all
23655 references to them. Thus we dumped them again. Also add a
23656 reference to them, but beware of a -g0 compile and -g link,
23657 in which case the reference will already be present. */
23658 tree type = TREE_TYPE (decl_or_origin);
23659 if (in_lto_p
23660 && ! get_AT (var_die, DW_AT_type)
23661 && variably_modified_type_p
23662 (type, decl_function_context (decl_or_origin)))
23663 {
23664 if (decl_by_reference_p (decl_or_origin))
23665 add_type_attribute (var_die, TREE_TYPE (type),
23666 TYPE_UNQUALIFIED, false, context_die);
23667 else
23668 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23669 false, context_die);
23670 }
23671
23672 goto gen_variable_die_location;
23673 }
23674 }
23675
23676 /* For static data members, the declaration in the class is supposed
23677 to have the DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23678 also in DWARF2; the specification should still be DW_TAG_variable
23679 referencing the DW_TAG_member DIE. */
23680 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23681 var_die = new_die (DW_TAG_member, context_die, decl);
23682 else
23683 var_die = new_die (DW_TAG_variable, context_die, decl);
23684
23685 if (origin != NULL)
23686 add_abstract_origin_attribute (var_die, origin);
23687
23688 /* Loop unrolling can create multiple blocks that refer to the same
23689 static variable, so we must test for the DW_AT_declaration flag.
23690
23691 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23692 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23693 sharing them.
23694
23695 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23696 else if (decl_will_get_specification_p (old_die, decl, declaration))
23697 {
23698 /* This is a definition of a C++ class level static. */
23699 add_AT_specification (var_die, old_die);
23700 specialization_p = true;
23701 if (DECL_NAME (decl))
23702 {
23703 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23704 struct dwarf_file_data * file_index = lookup_filename (s.file);
23705
23706 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23707 add_AT_file (var_die, DW_AT_decl_file, file_index);
23708
23709 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23710 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23711
23712 if (debug_column_info
23713 && s.column
23714 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23715 != (unsigned) s.column))
23716 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23717
23718 if (old_die->die_tag == DW_TAG_member)
23719 add_linkage_name (var_die, decl);
23720 }
23721 }
23722 else
23723 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23724
23725 if ((origin == NULL && !specialization_p)
23726 || (origin != NULL
23727 && !DECL_ABSTRACT_P (decl_or_origin)
23728 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23729 decl_function_context
23730 (decl_or_origin))))
23731 {
23732 tree type = TREE_TYPE (decl_or_origin);
23733
23734 if (decl_by_reference_p (decl_or_origin))
23735 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23736 context_die);
23737 else
23738 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23739 context_die);
23740 }
23741
23742 if (origin == NULL && !specialization_p)
23743 {
23744 if (TREE_PUBLIC (decl))
23745 add_AT_flag (var_die, DW_AT_external, 1);
23746
23747 if (DECL_ARTIFICIAL (decl))
23748 add_AT_flag (var_die, DW_AT_artificial, 1);
23749
23750 add_alignment_attribute (var_die, decl);
23751
23752 add_accessibility_attribute (var_die, decl);
23753 }
23754
23755 if (declaration)
23756 add_AT_flag (var_die, DW_AT_declaration, 1);
23757
23758 if (decl && (DECL_ABSTRACT_P (decl)
23759 || !old_die || is_declaration_die (old_die)))
23760 equate_decl_number_to_die (decl, var_die);
23761
23762 gen_variable_die_location:
23763 if (! declaration
23764 && (! DECL_ABSTRACT_P (decl_or_origin)
23765 /* Local static vars are shared between all clones/inlines,
23766 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23767 already set. */
23768 || (VAR_P (decl_or_origin)
23769 && TREE_STATIC (decl_or_origin)
23770 && DECL_RTL_SET_P (decl_or_origin))))
23771 {
23772 if (early_dwarf)
23773 add_pubname (decl_or_origin, var_die);
23774 else
23775 add_location_or_const_value_attribute (var_die, decl_or_origin,
23776 decl == NULL);
23777 }
23778 else
23779 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23780
23781 if ((dwarf_version >= 4 || !dwarf_strict)
23782 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23783 DW_AT_const_expr) == 1
23784 && !get_AT (var_die, DW_AT_const_expr)
23785 && !specialization_p)
23786 add_AT_flag (var_die, DW_AT_const_expr, 1);
23787
23788 if (!dwarf_strict)
23789 {
23790 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23791 DW_AT_inline);
23792 if (inl != -1
23793 && !get_AT (var_die, DW_AT_inline)
23794 && !specialization_p)
23795 add_AT_unsigned (var_die, DW_AT_inline, inl);
23796 }
23797 }
23798
23799 /* Generate a DIE to represent a named constant. */
23800
23801 static void
23802 gen_const_die (tree decl, dw_die_ref context_die)
23803 {
23804 dw_die_ref const_die;
23805 tree type = TREE_TYPE (decl);
23806
23807 const_die = lookup_decl_die (decl);
23808 if (const_die)
23809 return;
23810
23811 const_die = new_die (DW_TAG_constant, context_die, decl);
23812 equate_decl_number_to_die (decl, const_die);
23813 add_name_and_src_coords_attributes (const_die, decl);
23814 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23815 if (TREE_PUBLIC (decl))
23816 add_AT_flag (const_die, DW_AT_external, 1);
23817 if (DECL_ARTIFICIAL (decl))
23818 add_AT_flag (const_die, DW_AT_artificial, 1);
23819 tree_add_const_value_attribute_for_decl (const_die, decl);
23820 }
23821
23822 /* Generate a DIE to represent a label identifier. */
23823
23824 static void
23825 gen_label_die (tree decl, dw_die_ref context_die)
23826 {
23827 tree origin = decl_ultimate_origin (decl);
23828 dw_die_ref lbl_die = lookup_decl_die (decl);
23829 rtx insn;
23830 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23831
23832 if (!lbl_die)
23833 {
23834 lbl_die = new_die (DW_TAG_label, context_die, decl);
23835 equate_decl_number_to_die (decl, lbl_die);
23836
23837 if (origin != NULL)
23838 add_abstract_origin_attribute (lbl_die, origin);
23839 else
23840 add_name_and_src_coords_attributes (lbl_die, decl);
23841 }
23842
23843 if (DECL_ABSTRACT_P (decl))
23844 equate_decl_number_to_die (decl, lbl_die);
23845 else if (! early_dwarf)
23846 {
23847 insn = DECL_RTL_IF_SET (decl);
23848
23849 /* Deleted labels are programmer specified labels which have been
23850 eliminated because of various optimizations. We still emit them
23851 here so that it is possible to put breakpoints on them. */
23852 if (insn
23853 && (LABEL_P (insn)
23854 || ((NOTE_P (insn)
23855 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23856 {
23857 /* When optimization is enabled (via -O) some parts of the compiler
23858 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23859 represent source-level labels which were explicitly declared by
23860 the user. This really shouldn't be happening though, so catch
23861 it if it ever does happen. */
23862 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23863
23864 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23865 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23866 }
23867 else if (insn
23868 && NOTE_P (insn)
23869 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23870 && CODE_LABEL_NUMBER (insn) != -1)
23871 {
23872 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23873 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23874 }
23875 }
23876 }
23877
23878 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23879 attributes to the DIE for a block STMT, to describe where the inlined
23880 function was called from. This is similar to add_src_coords_attributes. */
23881
23882 static inline void
23883 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23884 {
23885 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23886
23887 if (dwarf_version >= 3 || !dwarf_strict)
23888 {
23889 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23890 add_AT_unsigned (die, DW_AT_call_line, s.line);
23891 if (debug_column_info && s.column)
23892 add_AT_unsigned (die, DW_AT_call_column, s.column);
23893 }
23894 }
23895
23896
23897 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23898 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23899
23900 static inline void
23901 add_high_low_attributes (tree stmt, dw_die_ref die)
23902 {
23903 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23904
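  /* If this block is the outer scope of an inlined function for which an
     entry-point marker was recorded, use that marker to emit
     DW_AT_entry_pc (and, with location views, an entry view), then drop
     the table entry.  */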
23905 if (inline_entry_data **iedp
23906 = !inline_entry_data_table ? NULL
23907 : inline_entry_data_table->find_slot_with_hash (stmt,
23908 htab_hash_pointer (stmt),
23909 NO_INSERT))
23910 {
23911 inline_entry_data *ied = *iedp;
23912 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23913 gcc_assert (debug_inline_points);
23914 gcc_assert (inlined_function_outer_scope_p (stmt));
23915
23916 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23917 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23918
23919 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23920 && !dwarf_strict)
23921 {
23922 if (!output_asm_line_debug_info ())
23923 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23924 else
23925 {
23926 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23927 /* FIXME: this will resolve to a small number. Could we
23928 possibly emit smaller data? Ideally we'd emit a
23929 uleb128, but that would make the size of DIEs
23930 impossible for the compiler to compute, since it's
23931 the assembler that computes the value of the view
23932 label in this case. Ideally, we'd have a single form
23933 encompassing both the address and the view, and
23934 indirecting them through a table might make things
23935 easier, but even that would be more wasteful,
23936 space-wise, than what we have now. */
23937 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23938 }
23939 }
23940
23941 inline_entry_data_table->clear_slot (iedp);
23942 }
23943
23944 if (BLOCK_FRAGMENT_CHAIN (stmt)
23945 && (dwarf_version >= 3 || !dwarf_strict))
23946 {
23947 tree chain, superblock = NULL_TREE;
23948 dw_die_ref pdie;
23949 dw_attr_node *attr = NULL;
23950
23951 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23952 {
23953 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23954 BLOCK_NUMBER (stmt));
23955 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23956 }
23957
23958 /* Optimize duplicate .debug_ranges lists or even tails of
23959 lists. If this BLOCK has the same ranges as its supercontext,
23960 look up the DW_AT_ranges attribute in the supercontext (and
23961 recursively so), verify that the ranges_table contains the
23962 right values and use it instead of adding a new .debug_ranges entry. */
23963 for (chain = stmt, pdie = die;
23964 BLOCK_SAME_RANGE (chain);
23965 chain = BLOCK_SUPERCONTEXT (chain))
23966 {
23967 dw_attr_node *new_attr;
23968
23969 pdie = pdie->die_parent;
23970 if (pdie == NULL)
23971 break;
23972 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23973 break;
23974 new_attr = get_AT (pdie, DW_AT_ranges);
23975 if (new_attr == NULL
23976 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23977 break;
23978 attr = new_attr;
23979 superblock = BLOCK_SUPERCONTEXT (chain);
23980 }
23981 if (attr != NULL
23982 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
23983 == BLOCK_NUMBER (superblock))
23984 && BLOCK_FRAGMENT_CHAIN (superblock))
23985 {
23986 unsigned long off = attr->dw_attr_val.v.val_offset;
23987 unsigned long supercnt = 0, thiscnt = 0;
23988 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
23989 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23990 {
23991 ++supercnt;
23992 gcc_checking_assert ((*ranges_table)[off + supercnt].num
23993 == BLOCK_NUMBER (chain));
23994 }
23995 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
23996 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
23997 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23998 ++thiscnt;
23999 gcc_assert (supercnt >= thiscnt);
24000 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24001 false);
24002 note_rnglist_head (off + supercnt - thiscnt);
24003 return;
24004 }
24005
24006 unsigned int offset = add_ranges (stmt, true);
24007 add_AT_range_list (die, DW_AT_ranges, offset, false);
24008 note_rnglist_head (offset);
24009
24010 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24011 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24012 do
24013 {
24014 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24015 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24016 chain = BLOCK_FRAGMENT_CHAIN (chain);
24017 }
24018 while (chain);
24019 add_ranges (NULL);
24020 }
24021 else
24022 {
24023 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24024 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24025 BLOCK_NUMBER (stmt));
24026 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24027 BLOCK_NUMBER (stmt));
24028 add_AT_low_high_pc (die, label, label_high, false);
24029 }
24030 }
24031
24032 /* Generate a DIE for a lexical block. */
24033
24034 static void
24035 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24036 {
24037 dw_die_ref old_die = BLOCK_DIE (stmt);
24038 dw_die_ref stmt_die = NULL;
24039 if (!old_die)
24040 {
24041 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24042 BLOCK_DIE (stmt) = stmt_die;
24043 }
24044
24045 if (BLOCK_ABSTRACT (stmt))
24046 {
24047 if (old_die)
24048 {
24049 	  /* This must have been generated early and it won't even
24050 	     need location information since it is part of a DW_AT_inline
24051 	     function.  */
24052 if (flag_checking)
24053 for (dw_die_ref c = context_die; c; c = c->die_parent)
24054 if (c->die_tag == DW_TAG_inlined_subroutine
24055 || c->die_tag == DW_TAG_subprogram)
24056 {
24057 gcc_assert (get_AT (c, DW_AT_inline));
24058 break;
24059 }
24060 return;
24061 }
24062 }
24063 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24064 {
24065 /* If this is an inlined instance, create a new lexical die for
24066 anything below to attach DW_AT_abstract_origin to. */
24067 if (old_die)
24068 {
24069 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24070 BLOCK_DIE (stmt) = stmt_die;
24071 old_die = NULL;
24072 }
24073
24074 tree origin = block_ultimate_origin (stmt);
24075 if (origin != NULL_TREE && origin != stmt)
24076 add_abstract_origin_attribute (stmt_die, origin);
24077 }
24078
24079 if (old_die)
24080 stmt_die = old_die;
24081
24082   /* A non-abstract block whose blocks have already been reordered
24083 should have the instruction range for this block. If so, set the
24084 high/low attributes. */
24085 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24086 {
24087 gcc_assert (stmt_die);
24088 add_high_low_attributes (stmt, stmt_die);
24089 }
24090
24091 decls_for_scope (stmt, stmt_die);
24092 }
24093
24094 /* Generate a DIE for an inlined subprogram. */
24095
24096 static void
24097 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24098 {
24099 tree decl;
24100
24101   /* The instance of the function that is effectively being inlined shall
24102      not be abstract.  */
24103 gcc_assert (! BLOCK_ABSTRACT (stmt));
24104
24105 decl = block_ultimate_origin (stmt);
24106
24107 /* Make sure any inlined functions are known to be inlineable. */
24108 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24109 || cgraph_function_possibly_inlined_p (decl));
24110
24111 if (! BLOCK_ABSTRACT (stmt))
24112 {
24113 dw_die_ref subr_die
24114 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24115
24116 if (call_arg_locations || debug_inline_points)
24117 BLOCK_DIE (stmt) = subr_die;
24118 add_abstract_origin_attribute (subr_die, decl);
24119 if (TREE_ASM_WRITTEN (stmt))
24120 add_high_low_attributes (stmt, subr_die);
24121 add_call_src_coords_attributes (stmt, subr_die);
24122
24123 decls_for_scope (stmt, subr_die);
24124 }
24125 }
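/* For illustration only: for a call site of an inlined function `foo', the
   DIE emitted above typically looks like

       DW_TAG_inlined_subroutine
	 DW_AT_abstract_origin -> abstract instance DIE of foo
	 DW_AT_low_pc/DW_AT_high_pc (or DW_AT_ranges)
	 DW_AT_call_file / DW_AT_call_line

   with the inlined body's blocks, parameters and variables as children
   (see decls_for_scope).  */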
24126
24127 /* Generate a DIE for a field in a record or structure.  CTX is required: see
24128 the comment for VLR_CONTEXT. */
24129
24130 static void
24131 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24132 {
24133 dw_die_ref decl_die;
24134
24135 if (TREE_TYPE (decl) == error_mark_node)
24136 return;
24137
24138 decl_die = new_die (DW_TAG_member, context_die, decl);
24139 add_name_and_src_coords_attributes (decl_die, decl);
24140 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24141 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24142 context_die);
24143
24144 if (DECL_BIT_FIELD_TYPE (decl))
24145 {
24146 add_byte_size_attribute (decl_die, decl);
24147 add_bit_size_attribute (decl_die, decl);
24148 add_bit_offset_attribute (decl_die, decl, ctx);
24149 }
24150
24151 add_alignment_attribute (decl_die, decl);
24152
24153 /* If we have a variant part offset, then we are supposed to process a member
24154 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24155 trees. */
24156 gcc_assert (ctx->variant_part_offset == NULL_TREE
24157 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24158 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24159 add_data_member_location_attribute (decl_die, decl, ctx);
24160
24161 if (DECL_ARTIFICIAL (decl))
24162 add_AT_flag (decl_die, DW_AT_artificial, 1);
24163
24164 add_accessibility_attribute (decl_die, decl);
24165
24166 /* Equate decl number to die, so that we can look up this decl later on. */
24167 equate_decl_number_to_die (decl, decl_die);
24168 }
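/* A hedged example of what the code above produces: for

       struct s { unsigned int f : 3; };

   the member `f' gets a DW_TAG_member DIE carrying DW_AT_name and DW_AT_type,
   plus DW_AT_byte_size, DW_AT_bit_size and a bit offset attribute for the
   bit-field case, and DW_AT_data_member_location since the context is not a
   union.  */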
24169
24170 /* Generate a DIE for a pointer to a member type. TYPE can be an
24171 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24172 pointer to member function. */
24173
24174 static void
24175 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24176 {
24177 if (lookup_type_die (type))
24178 return;
24179
24180 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24181 scope_die_for (type, context_die), type);
24182
24183 equate_type_number_to_die (type, ptr_die);
24184 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24185 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24186 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24187 context_die);
24188 add_alignment_attribute (ptr_die, type);
24189
24190 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24191 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24192 {
24193 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24194 add_AT_loc (ptr_die, DW_AT_use_location, op);
24195 }
24196 }
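/* Illustrative sketch: for a C++ pointer to data member such as `int S::*pm',
   the type of `pm' gets a DW_TAG_ptr_to_member_type DIE whose
   DW_AT_containing_type refers to S's DIE and whose DW_AT_type refers to
   `int'; for data members only, DW_AT_use_location is a single DW_OP_plus,
   i.e. "add the member offset to the object address".  */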
24197
24198 static char *producer_string;
24199
24200 /* Return a heap-allocated producer string, including the command-line
24201    options if -grecord-gcc-switches is in effect.  */
24202
24203 static char *
24204 gen_producer_string (void)
24205 {
24206 size_t j;
24207 auto_vec<const char *> switches;
24208 const char *language_string = lang_hooks.name;
24209 char *producer, *tail;
24210 const char *p;
24211 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24212 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24213
24214 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24215 switch (save_decoded_options[j].opt_index)
24216 {
24217 case OPT_o:
24218 case OPT_d:
24219 case OPT_dumpbase:
24220 case OPT_dumpdir:
24221 case OPT_auxbase:
24222 case OPT_auxbase_strip:
24223 case OPT_quiet:
24224 case OPT_version:
24225 case OPT_v:
24226 case OPT_w:
24227 case OPT_L:
24228 case OPT_D:
24229 case OPT_I:
24230 case OPT_U:
24231 case OPT_SPECIAL_unknown:
24232 case OPT_SPECIAL_ignore:
24233 case OPT_SPECIAL_program_name:
24234 case OPT_SPECIAL_input_file:
24235 case OPT_grecord_gcc_switches:
24236 case OPT__output_pch_:
24237 case OPT_fdiagnostics_show_location_:
24238 case OPT_fdiagnostics_show_option:
24239 case OPT_fdiagnostics_show_caret:
24240 case OPT_fdiagnostics_color_:
24241 case OPT_fverbose_asm:
24242 case OPT____:
24243 case OPT__sysroot_:
24244 case OPT_nostdinc:
24245 case OPT_nostdinc__:
24246 case OPT_fpreprocessed:
24247 case OPT_fltrans_output_list_:
24248 case OPT_fresolution_:
24249 case OPT_fdebug_prefix_map_:
24250 case OPT_fmacro_prefix_map_:
24251 case OPT_ffile_prefix_map_:
24252 case OPT_fcompare_debug:
24253 case OPT_fchecking:
24254 case OPT_fchecking_:
24255 /* Ignore these. */
24256 continue;
24257 default:
24258 if (cl_options[save_decoded_options[j].opt_index].flags
24259 & CL_NO_DWARF_RECORD)
24260 continue;
24261 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24262 == '-');
24263 switch (save_decoded_options[j].canonical_option[0][1])
24264 {
24265 case 'M':
24266 case 'i':
24267 case 'W':
24268 continue;
24269 case 'f':
24270 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24271 "dump", 4) == 0)
24272 continue;
24273 break;
24274 default:
24275 break;
24276 }
24277 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24278 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24279 break;
24280 }
24281
24282 producer = XNEWVEC (char, plen + 1 + len + 1);
24283 tail = producer;
24284 sprintf (tail, "%s %s", language_string, version_string);
24285 tail += plen;
24286
24287 FOR_EACH_VEC_ELT (switches, j, p)
24288 {
24289 len = strlen (p);
24290 *tail = ' ';
24291 memcpy (tail + 1, p, len);
24292 tail += len + 1;
24293 }
24294
24295 *tail = '\0';
24296 return producer;
24297 }
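/* For example (shape only; the exact contents depend on the options actually
   recorded), with -grecord-gcc-switches the producer string may read
   something like

       "GNU C11 8.0.1 -mtune=generic -march=x86-64 -g -O2"

   i.e. "<language> <version>" followed by the retained switches; without
   -grecord-gcc-switches only "<language> <version>" is emitted.  */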
24298
24299 /* Given two C and/or C++ language/version strings, return the "highest".
24300 C++ is assumed to be "higher" than C in this case. Used for merging
24301 LTO translation unit languages. */
24302 static const char *
24303 highest_c_language (const char *lang1, const char *lang2)
24304 {
24305 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24306 return "GNU C++17";
24307 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24308 return "GNU C++14";
24309 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24310 return "GNU C++11";
24311 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24312 return "GNU C++98";
24313
24314 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24315 return "GNU C17";
24316 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24317 return "GNU C11";
24318 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24319 return "GNU C99";
24320 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24321 return "GNU C89";
24322
24323 gcc_unreachable ();
24324 }
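/* For example, merging a unit recorded as "GNU C99" with one recorded as
   "GNU C++11" yields "GNU C++11", in either argument order.  This is only a
   heuristic used to pick a DW_AT_language for the combined LTO unit.  */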
24325
24326
24327 /* Generate the DIE for the compilation unit. */
24328
24329 static dw_die_ref
24330 gen_compile_unit_die (const char *filename)
24331 {
24332 dw_die_ref die;
24333 const char *language_string = lang_hooks.name;
24334 int language;
24335
24336 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24337
24338 if (filename)
24339 {
24340 add_name_attribute (die, filename);
24341 /* Don't add cwd for <built-in>. */
24342 if (filename[0] != '<')
24343 add_comp_dir_attribute (die);
24344 }
24345
24346 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24347
24348 /* If our producer is LTO try to figure out a common language to use
24349 from the global list of translation units. */
24350 if (strcmp (language_string, "GNU GIMPLE") == 0)
24351 {
24352 unsigned i;
24353 tree t;
24354 const char *common_lang = NULL;
24355
24356 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24357 {
24358 if (!TRANSLATION_UNIT_LANGUAGE (t))
24359 continue;
24360 if (!common_lang)
24361 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24362 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24363 ;
24364 else if (strncmp (common_lang, "GNU C", 5) == 0
24365 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24366 /* Mixing C and C++ is ok, use C++ in that case. */
24367 common_lang = highest_c_language (common_lang,
24368 TRANSLATION_UNIT_LANGUAGE (t));
24369 else
24370 {
24371 /* Fall back to C. */
24372 common_lang = NULL;
24373 break;
24374 }
24375 }
24376
24377 if (common_lang)
24378 language_string = common_lang;
24379 }
24380
24381 language = DW_LANG_C;
24382 if (strncmp (language_string, "GNU C", 5) == 0
24383 && ISDIGIT (language_string[5]))
24384 {
24385 language = DW_LANG_C89;
24386 if (dwarf_version >= 3 || !dwarf_strict)
24387 {
24388 if (strcmp (language_string, "GNU C89") != 0)
24389 language = DW_LANG_C99;
24390
24391 if (dwarf_version >= 5 /* || !dwarf_strict */)
24392 if (strcmp (language_string, "GNU C11") == 0
24393 || strcmp (language_string, "GNU C17") == 0)
24394 language = DW_LANG_C11;
24395 }
24396 }
24397 else if (strncmp (language_string, "GNU C++", 7) == 0)
24398 {
24399 language = DW_LANG_C_plus_plus;
24400 if (dwarf_version >= 5 /* || !dwarf_strict */)
24401 {
24402 if (strcmp (language_string, "GNU C++11") == 0)
24403 language = DW_LANG_C_plus_plus_11;
24404 else if (strcmp (language_string, "GNU C++14") == 0)
24405 language = DW_LANG_C_plus_plus_14;
24406 else if (strcmp (language_string, "GNU C++17") == 0)
24407 /* For now. */
24408 language = DW_LANG_C_plus_plus_14;
24409 }
24410 }
24411 else if (strcmp (language_string, "GNU F77") == 0)
24412 language = DW_LANG_Fortran77;
24413 else if (dwarf_version >= 3 || !dwarf_strict)
24414 {
24415 if (strcmp (language_string, "GNU Ada") == 0)
24416 language = DW_LANG_Ada95;
24417 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24418 {
24419 language = DW_LANG_Fortran95;
24420 if (dwarf_version >= 5 /* || !dwarf_strict */)
24421 {
24422 if (strcmp (language_string, "GNU Fortran2003") == 0)
24423 language = DW_LANG_Fortran03;
24424 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24425 language = DW_LANG_Fortran08;
24426 }
24427 }
24428 else if (strcmp (language_string, "GNU Objective-C") == 0)
24429 language = DW_LANG_ObjC;
24430 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24431 language = DW_LANG_ObjC_plus_plus;
24432 else if (dwarf_version >= 5 || !dwarf_strict)
24433 {
24434 if (strcmp (language_string, "GNU Go") == 0)
24435 language = DW_LANG_Go;
24436 }
24437 }
24438 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24439 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24440 language = DW_LANG_Fortran90;
24441
24442 add_AT_unsigned (die, DW_AT_language, language);
24443
24444 switch (language)
24445 {
24446 case DW_LANG_Fortran77:
24447 case DW_LANG_Fortran90:
24448 case DW_LANG_Fortran95:
24449 case DW_LANG_Fortran03:
24450 case DW_LANG_Fortran08:
24451 /* Fortran has case insensitive identifiers and the front-end
24452 lowercases everything. */
24453 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24454 break;
24455 default:
24456 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24457 break;
24458 }
24459 return die;
24460 }
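/* Hedged illustration of the mapping above: a unit recorded as "GNU C11"
   becomes DW_LANG_C99 for DWARF 3/4 (DW_LANG_C89 under strict DWARF 2) and
   DW_LANG_C11 only for DWARF 5, while "GNU C++17" is, for now, still emitted
   as DW_LANG_C_plus_plus_14.  Besides DW_AT_language, the CU DIE carries
   DW_AT_producer, and DW_AT_name/DW_AT_comp_dir when a file name was given
   (DW_AT_comp_dir is skipped for names like "<built-in>").  */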
24461
24462 /* Generate the DIE for a base class. */
24463
24464 static void
24465 gen_inheritance_die (tree binfo, tree access, tree type,
24466 dw_die_ref context_die)
24467 {
24468 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24469 struct vlr_context ctx = { type, NULL };
24470
24471 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24472 context_die);
24473 add_data_member_location_attribute (die, binfo, &ctx);
24474
24475 if (BINFO_VIRTUAL_P (binfo))
24476 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24477
24478 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24479 children, otherwise the default is DW_ACCESS_public. In DWARF2
24480 the default has always been DW_ACCESS_private. */
24481 if (access == access_public_node)
24482 {
24483 if (dwarf_version == 2
24484 || context_die->die_tag == DW_TAG_class_type)
24485 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24486 }
24487 else if (access == access_protected_node)
24488 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24489 else if (dwarf_version > 2
24490 && context_die->die_tag != DW_TAG_class_type)
24491 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24492 }
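/* Hedged example: for

       class D : protected B { ... };

   the DIE for D gets a DW_TAG_inheritance child whose DW_AT_type refers to B,
   with a DW_AT_data_member_location for the base offset and
   DW_AT_accessibility DW_ACCESS_protected; a virtual base would additionally
   get DW_AT_virtuality DW_VIRTUALITY_virtual.  */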
24493
24494 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24495 structure. */
24496 static bool
24497 is_variant_part (tree decl)
24498 {
24499 return (TREE_CODE (decl) == FIELD_DECL
24500 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24501 }
24502
24503 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24504 return the FIELD_DECL. Return NULL_TREE otherwise. */
24505
24506 static tree
24507 analyze_discr_in_predicate (tree operand, tree struct_type)
24508 {
24509 bool continue_stripping = true;
24510 while (continue_stripping)
24511 switch (TREE_CODE (operand))
24512 {
24513 CASE_CONVERT:
24514 operand = TREE_OPERAND (operand, 0);
24515 break;
24516 default:
24517 continue_stripping = false;
24518 break;
24519 }
24520
24521 /* Match field access to members of struct_type only. */
24522 if (TREE_CODE (operand) == COMPONENT_REF
24523 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24524 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24525 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24526 return TREE_OPERAND (operand, 1);
24527 else
24528 return NULL_TREE;
24529 }
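/* For instance (a sketch of the trees this matches): the predicate built for
   a variant typically contains a COMPONENT_REF of a PLACEHOLDER_EXPR for
   STRUCT_TYPE, possibly wrapped in NOP conversions; that is stripped down to
   the referenced FIELD_DECL, and anything else yields NULL_TREE.  */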
24530
24531 /* Check that SRC is a constant integer that can be represented as a native
24532 integer constant (either signed or unsigned). If so, store it into DEST and
24533 return true. Return false otherwise. */
24534
24535 static bool
24536 get_discr_value (tree src, dw_discr_value *dest)
24537 {
24538 tree discr_type = TREE_TYPE (src);
24539
24540 if (lang_hooks.types.get_debug_type)
24541 {
24542 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24543 if (debug_type != NULL)
24544 discr_type = debug_type;
24545 }
24546
24547 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24548 return false;
24549
24550 /* Signedness can vary between the original type and the debug type. This
24551 can happen for character types in Ada for instance: the character type
24552 used for code generation can be signed, to be compatible with the C one,
24553 but from a debugger point of view, it must be unsigned. */
24554 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24555 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24556
24557 if (is_orig_unsigned != is_debug_unsigned)
24558 src = fold_convert (discr_type, src);
24559
24560 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24561 return false;
24562
24563 dest->pos = is_debug_unsigned;
24564 if (is_debug_unsigned)
24565 dest->v.uval = tree_to_uhwi (src);
24566 else
24567 dest->v.sval = tree_to_shwi (src);
24568
24569 return true;
24570 }
24571
24572 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24573 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24574 store NULL_TREE in DISCR_DECL. Otherwise:
24575
24576 - store the discriminant field in STRUCT_TYPE that controls the variant
24577 part to *DISCR_DECL
24578
24579 - put in *DISCR_LISTS_P an array where for each variant, the item
24580 represents the corresponding matching list of discriminant values.
24581
24582 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24583 the above array.
24584
24585 Note that when the array is allocated (i.e. when the analysis is
24586 successful), it is up to the caller to free the array. */
24587
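/* Example (illustrative only, simplified): for a variant part controlled by a
   discriminant field D, the DECL_QUALIFIERs GCC builds look roughly like

       variant 1:  PLACEHOLDER.D == 1
       variant 2:  PLACEHOLDER.D >= 5 && PLACEHOLDER.D <= 9
       variant 3:  boolean_true_node	(the default/"others" choice)

   which this analysis turns into *DISCR_DECL = D and the discriminant lists
   { 1 }, { [5 .. 9] } and an empty list for the default variant.  */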
24588 static void
24589 analyze_variants_discr (tree variant_part_decl,
24590 tree struct_type,
24591 tree *discr_decl,
24592 dw_discr_list_ref **discr_lists_p,
24593 unsigned *discr_lists_length)
24594 {
24595 tree variant_part_type = TREE_TYPE (variant_part_decl);
24596 tree variant;
24597 dw_discr_list_ref *discr_lists;
24598 unsigned i;
24599
24600 /* Compute how many variants there are in this variant part. */
24601 *discr_lists_length = 0;
24602 for (variant = TYPE_FIELDS (variant_part_type);
24603 variant != NULL_TREE;
24604 variant = DECL_CHAIN (variant))
24605 ++*discr_lists_length;
24606
24607 *discr_decl = NULL_TREE;
24608 *discr_lists_p
24609 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24610 sizeof (**discr_lists_p));
24611 discr_lists = *discr_lists_p;
24612
24613 /* And then analyze all variants to extract discriminant information for all
24614 of them. This analysis is conservative: as soon as we detect something we
24615 do not support, abort everything and pretend we found nothing. */
24616 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24617 variant != NULL_TREE;
24618 variant = DECL_CHAIN (variant), ++i)
24619 {
24620 tree match_expr = DECL_QUALIFIER (variant);
24621
24622 /* Now, try to analyze the predicate and deduce a discriminant for
24623 it. */
24624 if (match_expr == boolean_true_node)
24625 /* Typically happens for the default variant: it matches all cases that
24626 previous variants rejected. Don't output any matching value for
24627 this one. */
24628 continue;
24629
24630 /* The following loop tries to iterate over each discriminant
24631 possibility: single values or ranges. */
24632 while (match_expr != NULL_TREE)
24633 {
24634 tree next_round_match_expr;
24635 tree candidate_discr = NULL_TREE;
24636 dw_discr_list_ref new_node = NULL;
24637
24638 /* Possibilities are matched one after the other by nested
24639 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24640 	     continue with the rest at the next iteration.  */
24641 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24642 {
24643 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24644 match_expr = TREE_OPERAND (match_expr, 1);
24645 }
24646 else
24647 next_round_match_expr = NULL_TREE;
24648
24649 if (match_expr == boolean_false_node)
24650 /* This sub-expression matches nothing: just wait for the next
24651 one. */
24652 ;
24653
24654 else if (TREE_CODE (match_expr) == EQ_EXPR)
24655 {
24656 /* We are matching: <discr_field> == <integer_cst>
24657 This sub-expression matches a single value. */
24658 tree integer_cst = TREE_OPERAND (match_expr, 1);
24659
24660 candidate_discr
24661 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24662 struct_type);
24663
24664 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24665 if (!get_discr_value (integer_cst,
24666 &new_node->dw_discr_lower_bound))
24667 goto abort;
24668 new_node->dw_discr_range = false;
24669 }
24670
24671 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24672 {
24673 /* We are matching:
24674 <discr_field> > <integer_cst>
24675 && <discr_field> < <integer_cst>.
24676 This sub-expression matches the range of values between the
24677 two matched integer constants. Note that comparisons can be
24678 inclusive or exclusive. */
24679 tree candidate_discr_1, candidate_discr_2;
24680 tree lower_cst, upper_cst;
24681 bool lower_cst_included, upper_cst_included;
24682 tree lower_op = TREE_OPERAND (match_expr, 0);
24683 tree upper_op = TREE_OPERAND (match_expr, 1);
24684
24685 /* When the comparison is exclusive, the integer constant is not
24686 the discriminant range bound we are looking for: we will have
24687 to increment or decrement it. */
24688 if (TREE_CODE (lower_op) == GE_EXPR)
24689 lower_cst_included = true;
24690 else if (TREE_CODE (lower_op) == GT_EXPR)
24691 lower_cst_included = false;
24692 else
24693 goto abort;
24694
24695 if (TREE_CODE (upper_op) == LE_EXPR)
24696 upper_cst_included = true;
24697 else if (TREE_CODE (upper_op) == LT_EXPR)
24698 upper_cst_included = false;
24699 else
24700 goto abort;
24701
24702 /* Extract the discriminant from the first operand and check it
24703 	     is consistent with the same analysis in the second
24704 operand. */
24705 candidate_discr_1
24706 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24707 struct_type);
24708 candidate_discr_2
24709 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24710 struct_type);
24711 if (candidate_discr_1 == candidate_discr_2)
24712 candidate_discr = candidate_discr_1;
24713 else
24714 goto abort;
24715
24716 /* Extract bounds from both. */
24717 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24718 lower_cst = TREE_OPERAND (lower_op, 1);
24719 upper_cst = TREE_OPERAND (upper_op, 1);
24720
24721 if (!lower_cst_included)
24722 lower_cst
24723 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24724 build_int_cst (TREE_TYPE (lower_cst), 1));
24725 if (!upper_cst_included)
24726 upper_cst
24727 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24728 build_int_cst (TREE_TYPE (upper_cst), 1));
24729
24730 if (!get_discr_value (lower_cst,
24731 &new_node->dw_discr_lower_bound)
24732 || !get_discr_value (upper_cst,
24733 &new_node->dw_discr_upper_bound))
24734 goto abort;
24735
24736 new_node->dw_discr_range = true;
24737 }
24738
24739 else
24740 /* Unsupported sub-expression: we cannot determine the set of
24741 matching discriminant values. Abort everything. */
24742 goto abort;
24743
24744 	  /* If the discriminant info is not consistent with what we saw so
24745 far, consider the analysis failed and abort everything. */
24746 if (candidate_discr == NULL_TREE
24747 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24748 goto abort;
24749 else
24750 *discr_decl = candidate_discr;
24751
24752 if (new_node != NULL)
24753 {
24754 new_node->dw_discr_next = discr_lists[i];
24755 discr_lists[i] = new_node;
24756 }
24757 match_expr = next_round_match_expr;
24758 }
24759 }
24760
24761 /* If we reach this point, we could match everything we were interested
24762 in. */
24763 return;
24764
24765 abort:
24766 /* Clean all data structure and return no result. */
24767 free (*discr_lists_p);
24768 *discr_lists_p = NULL;
24769 *discr_decl = NULL_TREE;
24770 }
24771
24772 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24773 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24774 under CONTEXT_DIE.
24775
24776 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24777 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24778 this type, which are record types, represent the available variants and each
24779 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24780 values are inferred from these attributes.
24781
24782 In trees, the offsets for the fields inside these sub-records are relative
24783 to the variant part itself, whereas the corresponding DIEs should have
24784 offset attributes that are relative to the embedding record base address.
24785 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24786 must be an expression that computes the offset of the variant part to
24787 describe in DWARF. */
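/* Sketch of the resulting DWARF (illustrative, names invented): a record with
   one variant part controlled by a discriminant `d' comes out roughly as

       DW_TAG_structure_type
	 DW_TAG_member "d"			<-- discriminant
	 DW_TAG_variant_part
	   DW_AT_discr -> DIE of "d"
	   DW_TAG_variant  (DW_AT_discr_value or DW_AT_discr_list)
	     DW_TAG_member ...			<-- fields of that variant
	   DW_TAG_variant			<-- default: no discr attribute
	     DW_TAG_member ...

   with every member's location expressed relative to the record base, hence
   the VARIANT_PART_OFFSET adjustments below.  */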
24788
24789 static void
24790 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24791 dw_die_ref context_die)
24792 {
24793 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24794 tree variant_part_offset = vlr_ctx->variant_part_offset;
24795 struct loc_descr_context ctx = {
24796 vlr_ctx->struct_type, /* context_type */
24797 NULL_TREE, /* base_decl */
24798 NULL, /* dpi */
24799 false, /* placeholder_arg */
24800 false /* placeholder_seen */
24801 };
24802
24803 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24804 NULL_TREE if there is no such field. */
24805 tree discr_decl = NULL_TREE;
24806 dw_discr_list_ref *discr_lists;
24807 unsigned discr_lists_length = 0;
24808 unsigned i;
24809
24810 dw_die_ref dwarf_proc_die = NULL;
24811 dw_die_ref variant_part_die
24812 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24813
24814 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24815
24816 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24817 &discr_decl, &discr_lists, &discr_lists_length);
24818
24819 if (discr_decl != NULL_TREE)
24820 {
24821 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24822
24823 if (discr_die)
24824 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24825 else
24826 /* We have no DIE for the discriminant, so just discard all
24827 	   discriminant information in the output.  */
24828 discr_decl = NULL_TREE;
24829 }
24830
24831 /* If the offset for this variant part is more complex than a constant,
24832 create a DWARF procedure for it so that we will not have to generate DWARF
24833 expressions for it for each member. */
24834 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24835 && (dwarf_version >= 3 || !dwarf_strict))
24836 {
24837 const tree dwarf_proc_fndecl
24838 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24839 build_function_type (TREE_TYPE (variant_part_offset),
24840 NULL_TREE));
24841 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24842 const dw_loc_descr_ref dwarf_proc_body
24843 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24844
24845 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24846 dwarf_proc_fndecl, context_die);
24847 if (dwarf_proc_die != NULL)
24848 variant_part_offset = dwarf_proc_call;
24849 }
24850
24851 /* Output DIEs for all variants. */
24852 i = 0;
24853 for (tree variant = TYPE_FIELDS (variant_part_type);
24854 variant != NULL_TREE;
24855 variant = DECL_CHAIN (variant), ++i)
24856 {
24857 tree variant_type = TREE_TYPE (variant);
24858 dw_die_ref variant_die;
24859
24860 /* All variants (i.e. members of a variant part) are supposed to be
24861 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24862 under these records. */
24863 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24864
24865 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24866 equate_decl_number_to_die (variant, variant_die);
24867
24868 /* Output discriminant values this variant matches, if any. */
24869 if (discr_decl == NULL || discr_lists[i] == NULL)
24870 	/* Either we have no discriminant information at all, or this variant
24871 	   matches no specific values: it is probably the default variant, so,
24872 	   as the standard says, don't output any discriminant value/list attribute.  */
24873 ;
24874 else if (discr_lists[i]->dw_discr_next == NULL
24875 && !discr_lists[i]->dw_discr_range)
24876 /* If there is only one accepted value, don't bother outputting a
24877 list. */
24878 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24879 else
24880 add_discr_list (variant_die, discr_lists[i]);
24881
24882 for (tree member = TYPE_FIELDS (variant_type);
24883 member != NULL_TREE;
24884 member = DECL_CHAIN (member))
24885 {
24886 struct vlr_context vlr_sub_ctx = {
24887 vlr_ctx->struct_type, /* struct_type */
24888 NULL /* variant_part_offset */
24889 };
24890 if (is_variant_part (member))
24891 {
24892 /* All offsets for fields inside variant parts are relative to
24893 the top-level embedding RECORD_TYPE's base address. On the
24894 other hand, offsets in GCC's types are relative to the
24895 nested-most variant part. So we have to sum offsets each time
24896 we recurse. */
24897
24898 vlr_sub_ctx.variant_part_offset
24899 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24900 variant_part_offset, byte_position (member));
24901 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24902 }
24903 else
24904 {
24905 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24906 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24907 }
24908 }
24909 }
24910
24911 free (discr_lists);
24912 }
24913
24914 /* Generate a DIE for a class member. */
24915
24916 static void
24917 gen_member_die (tree type, dw_die_ref context_die)
24918 {
24919 tree member;
24920 tree binfo = TYPE_BINFO (type);
24921
24922 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24923
24924 /* If this is not an incomplete type, output descriptions of each of its
24925 members. Note that as we output the DIEs necessary to represent the
24926 members of this record or union type, we will also be trying to output
24927 DIEs to represent the *types* of those members. However the `type'
24928 function (above) will specifically avoid generating type DIEs for member
24929 types *within* the list of member DIEs for this (containing) type except
24930 for those types (of members) which are explicitly marked as also being
24931      members of this (containing) type themselves.  The g++ front end can
24932 force any given type to be treated as a member of some other (containing)
24933 type by setting the TYPE_CONTEXT of the given (member) type to point to
24934 the TREE node representing the appropriate (containing) type. */
24935
24936 /* First output info about the base classes. */
24937 if (binfo)
24938 {
24939 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24940 int i;
24941 tree base;
24942
24943 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24944 gen_inheritance_die (base,
24945 (accesses ? (*accesses)[i] : access_public_node),
24946 type,
24947 context_die);
24948 }
24949
24950 /* Now output info about the data members and type members. */
24951 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24952 {
24953 struct vlr_context vlr_ctx = { type, NULL_TREE };
24954 bool static_inline_p
24955 = (TREE_STATIC (member)
24956 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24957 != -1));
24958
24959 /* Ignore clones. */
24960 if (DECL_ABSTRACT_ORIGIN (member))
24961 continue;
24962
24963 /* If we thought we were generating minimal debug info for TYPE
24964 and then changed our minds, some of the member declarations
24965 may have already been defined. Don't define them again, but
24966 do put them in the right order. */
24967
24968 if (dw_die_ref child = lookup_decl_die (member))
24969 {
24970 /* Handle inline static data members, which only have in-class
24971 declarations. */
24972 dw_die_ref ref = NULL;
24973 if (child->die_tag == DW_TAG_variable
24974 && child->die_parent == comp_unit_die ())
24975 {
24976 ref = get_AT_ref (child, DW_AT_specification);
24977 /* For C++17 inline static data members followed by redundant
24978 out of class redeclaration, we might get here with
24979 child being the DIE created for the out of class
24980 redeclaration and with its DW_AT_specification being
24981 the DIE created for in-class definition. We want to
24982 reparent the latter, and don't want to create another
24983 DIE with DW_AT_specification in that case, because
24984 we already have one. */
24985 if (ref
24986 && static_inline_p
24987 && ref->die_tag == DW_TAG_variable
24988 && ref->die_parent == comp_unit_die ()
24989 && get_AT (ref, DW_AT_specification) == NULL)
24990 {
24991 child = ref;
24992 ref = NULL;
24993 static_inline_p = false;
24994 }
24995 }
24996
24997 if (child->die_tag == DW_TAG_variable
24998 && child->die_parent == comp_unit_die ()
24999 && ref == NULL)
25000 {
25001 reparent_child (child, context_die);
25002 if (dwarf_version < 5)
25003 child->die_tag = DW_TAG_member;
25004 }
25005 else
25006 splice_child_die (context_die, child);
25007 }
25008
25009 /* Do not generate standard DWARF for variant parts if we are generating
25010 the corresponding GNAT encodings: DIEs generated for both would
25011 conflict in our mappings. */
25012 else if (is_variant_part (member)
25013 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25014 {
25015 vlr_ctx.variant_part_offset = byte_position (member);
25016 gen_variant_part (member, &vlr_ctx, context_die);
25017 }
25018 else
25019 {
25020 vlr_ctx.variant_part_offset = NULL_TREE;
25021 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25022 }
25023
25024 /* For C++ inline static data members emit immediately a DW_TAG_variable
25025 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25026 DW_AT_specification. */
25027 if (static_inline_p)
25028 {
25029 int old_extern = DECL_EXTERNAL (member);
25030 DECL_EXTERNAL (member) = 0;
25031 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25032 DECL_EXTERNAL (member) = old_extern;
25033 }
25034 }
25035 }
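/* Hedged illustration of the inline static data member handling above: for

       struct S { static inline int x = 1; };

   early dwarf creates the in-class DW_TAG_member/DW_TAG_variable; the code
   above then immediately emits a second DW_TAG_variable at the compile unit
   level whose DW_AT_specification points back at the in-class DIE, so that
   consumers can find both the declaration and the definition.  */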
25036
25037 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25038 is set, we pretend that the type was never defined, so we only get the
25039 member DIEs needed by later specification DIEs. */
25040
25041 static void
25042 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25043 enum debug_info_usage usage)
25044 {
25045 if (TREE_ASM_WRITTEN (type))
25046 {
25047 /* Fill in the bound of variable-length fields in late dwarf if
25048 still incomplete. */
25049 if (!early_dwarf && variably_modified_type_p (type, NULL))
25050 for (tree member = TYPE_FIELDS (type);
25051 member;
25052 member = DECL_CHAIN (member))
25053 fill_variable_array_bounds (TREE_TYPE (member));
25054 return;
25055 }
25056
25057 dw_die_ref type_die = lookup_type_die (type);
25058 dw_die_ref scope_die = 0;
25059 int nested = 0;
25060 int complete = (TYPE_SIZE (type)
25061 && (! TYPE_STUB_DECL (type)
25062 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25063 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25064 complete = complete && should_emit_struct_debug (type, usage);
25065
25066 if (type_die && ! complete)
25067 return;
25068
25069 if (TYPE_CONTEXT (type) != NULL_TREE
25070 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25071 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25072 nested = 1;
25073
25074 scope_die = scope_die_for (type, context_die);
25075
25076   /* Generate child DIEs for template parameters.  */
25077 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25078 schedule_generic_params_dies_gen (type);
25079
25080 if (! type_die || (nested && is_cu_die (scope_die)))
25081 /* First occurrence of type or toplevel definition of nested class. */
25082 {
25083 dw_die_ref old_die = type_die;
25084
25085 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25086 ? record_type_tag (type) : DW_TAG_union_type,
25087 scope_die, type);
25088 equate_type_number_to_die (type, type_die);
25089 if (old_die)
25090 add_AT_specification (type_die, old_die);
25091 else
25092 add_name_attribute (type_die, type_tag (type));
25093 }
25094 else
25095 remove_AT (type_die, DW_AT_declaration);
25096
25097 /* If this type has been completed, then give it a byte_size attribute and
25098 then give a list of members. */
25099 if (complete && !ns_decl)
25100 {
25101 /* Prevent infinite recursion in cases where the type of some member of
25102 this type is expressed in terms of this type itself. */
25103 TREE_ASM_WRITTEN (type) = 1;
25104 add_byte_size_attribute (type_die, type);
25105 add_alignment_attribute (type_die, type);
25106 if (TYPE_STUB_DECL (type) != NULL_TREE)
25107 {
25108 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25109 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25110 }
25111
25112 /* If the first reference to this type was as the return type of an
25113 inline function, then it may not have a parent. Fix this now. */
25114 if (type_die->die_parent == NULL)
25115 add_child_die (scope_die, type_die);
25116
25117 push_decl_scope (type);
25118 gen_member_die (type, type_die);
25119 pop_decl_scope ();
25120
25121 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25122 if (TYPE_ARTIFICIAL (type))
25123 add_AT_flag (type_die, DW_AT_artificial, 1);
25124
25125 /* GNU extension: Record what type our vtable lives in. */
25126 if (TYPE_VFIELD (type))
25127 {
25128 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25129
25130 gen_type_die (vtype, context_die);
25131 add_AT_die_ref (type_die, DW_AT_containing_type,
25132 lookup_type_die (vtype));
25133 }
25134 }
25135 else
25136 {
25137 add_AT_flag (type_die, DW_AT_declaration, 1);
25138
25139 /* We don't need to do this for function-local types. */
25140 if (TYPE_STUB_DECL (type)
25141 && ! decl_function_context (TYPE_STUB_DECL (type)))
25142 vec_safe_push (incomplete_types, type);
25143 }
25144
25145 if (get_AT (type_die, DW_AT_name))
25146 add_pubtype (type, type_die);
25147 }
25148
25149 /* Generate a DIE for a subroutine _type_. */
25150
25151 static void
25152 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25153 {
25154 tree return_type = TREE_TYPE (type);
25155 dw_die_ref subr_die
25156 = new_die (DW_TAG_subroutine_type,
25157 scope_die_for (type, context_die), type);
25158
25159 equate_type_number_to_die (type, subr_die);
25160 add_prototyped_attribute (subr_die, type);
25161 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25162 context_die);
25163 add_alignment_attribute (subr_die, type);
25164 gen_formal_types_die (type, subr_die);
25165
25166 if (get_AT (subr_die, DW_AT_name))
25167 add_pubtype (type, subr_die);
25168 if ((dwarf_version >= 5 || !dwarf_strict)
25169 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25170 add_AT_flag (subr_die, DW_AT_reference, 1);
25171 if ((dwarf_version >= 5 || !dwarf_strict)
25172 && lang_hooks.types.type_dwarf_attribute (type,
25173 DW_AT_rvalue_reference) != -1)
25174 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25175 }
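/* For illustration: the C type `int (*) (int, ...)' yields, for the
   pointed-to function type, a DW_TAG_subroutine_type DIE with
   DW_AT_prototyped, DW_AT_type referring to `int', a DW_TAG_formal_parameter
   child for the `int' argument and a DW_TAG_unspecified_parameters child for
   the ellipsis (the last two come from gen_formal_types_die).  */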
25176
25177 /* Generate a DIE for a type definition. */
25178
25179 static void
25180 gen_typedef_die (tree decl, dw_die_ref context_die)
25181 {
25182 dw_die_ref type_die;
25183 tree type;
25184
25185 if (TREE_ASM_WRITTEN (decl))
25186 {
25187 if (DECL_ORIGINAL_TYPE (decl))
25188 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25189 return;
25190 }
25191
25192 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25193 checks in process_scope_var and modified_type_die), this should be called
25194 only for original types. */
25195 gcc_assert (decl_ultimate_origin (decl) == NULL
25196 || decl_ultimate_origin (decl) == decl);
25197
25198 TREE_ASM_WRITTEN (decl) = 1;
25199 type_die = new_die (DW_TAG_typedef, context_die, decl);
25200
25201 add_name_and_src_coords_attributes (type_die, decl);
25202 if (DECL_ORIGINAL_TYPE (decl))
25203 {
25204 type = DECL_ORIGINAL_TYPE (decl);
25205 if (type == error_mark_node)
25206 return;
25207
25208 gcc_assert (type != TREE_TYPE (decl));
25209 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25210 }
25211 else
25212 {
25213 type = TREE_TYPE (decl);
25214 if (type == error_mark_node)
25215 return;
25216
25217 if (is_naming_typedef_decl (TYPE_NAME (type)))
25218 {
25219 /* Here, we are in the case of decl being a typedef naming
25220 an anonymous type, e.g:
25221 typedef struct {...} foo;
25222 In that case TREE_TYPE (decl) is not a typedef variant
25223 type and TYPE_NAME of the anonymous type is set to the
25224 TYPE_DECL of the typedef. This construct is emitted by
25225 the C++ FE.
25226
25227 TYPE is the anonymous struct named by the typedef
25228 DECL. As we need the DW_AT_type attribute of the
25229 DW_TAG_typedef to point to the DIE of TYPE, let's
25230 generate that DIE right away. add_type_attribute
25231 called below will then pick (via lookup_type_die) that
25232 anonymous struct DIE. */
25233 if (!TREE_ASM_WRITTEN (type))
25234 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25235
25236 /* This is a GNU Extension. We are adding a
25237 DW_AT_linkage_name attribute to the DIE of the
25238 anonymous struct TYPE. The value of that attribute
25239 is the name of the typedef decl naming the anonymous
25240 struct. This greatly eases the work of consumers of
25241 this debug info. */
25242 add_linkage_name_raw (lookup_type_die (type), decl);
25243 }
25244 }
25245
25246 add_type_attribute (type_die, type, decl_quals (decl), false,
25247 context_die);
25248
25249 if (is_naming_typedef_decl (decl))
25250 /* We want that all subsequent calls to lookup_type_die with
25251 TYPE in argument yield the DW_TAG_typedef we have just
25252 created. */
25253 equate_type_number_to_die (type, type_die);
25254
25255 add_alignment_attribute (type_die, TREE_TYPE (decl));
25256
25257 add_accessibility_attribute (type_die, decl);
25258
25259 if (DECL_ABSTRACT_P (decl))
25260 equate_decl_number_to_die (decl, type_die);
25261
25262 if (get_AT (type_die, DW_AT_name))
25263 add_pubtype (decl, type_die);
25264 }
25265
25266 /* Generate a DIE for a struct, class, enum or union type. */
25267
25268 static void
25269 gen_tagged_type_die (tree type,
25270 dw_die_ref context_die,
25271 enum debug_info_usage usage)
25272 {
25273 int need_pop;
25274
25275 if (type == NULL_TREE
25276 || !is_tagged_type (type))
25277 return;
25278
25279 if (TREE_ASM_WRITTEN (type))
25280 need_pop = 0;
25281 /* If this is a nested type whose containing class hasn't been written
25282 out yet, writing it out will cover this one, too. This does not apply
25283 to instantiations of member class templates; they need to be added to
25284 the containing class as they are generated. FIXME: This hurts the
25285 idea of combining type decls from multiple TUs, since we can't predict
25286 what set of template instantiations we'll get. */
25287 else if (TYPE_CONTEXT (type)
25288 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25289 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25290 {
25291 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25292
25293 if (TREE_ASM_WRITTEN (type))
25294 return;
25295
25296 /* If that failed, attach ourselves to the stub. */
25297 push_decl_scope (TYPE_CONTEXT (type));
25298 context_die = lookup_type_die (TYPE_CONTEXT (type));
25299 need_pop = 1;
25300 }
25301 else if (TYPE_CONTEXT (type) != NULL_TREE
25302 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25303 {
25304 /* If this type is local to a function that hasn't been written
25305 out yet, use a NULL context for now; it will be fixed up in
25306 decls_for_scope. */
25307 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25308 /* A declaration DIE doesn't count; nested types need to go in the
25309 specification. */
25310 if (context_die && is_declaration_die (context_die))
25311 context_die = NULL;
25312 need_pop = 0;
25313 }
25314 else
25315 {
25316 context_die = declare_in_namespace (type, context_die);
25317 need_pop = 0;
25318 }
25319
25320 if (TREE_CODE (type) == ENUMERAL_TYPE)
25321 {
25322 /* This might have been written out by the call to
25323 declare_in_namespace. */
25324 if (!TREE_ASM_WRITTEN (type))
25325 gen_enumeration_type_die (type, context_die);
25326 }
25327 else
25328 gen_struct_or_union_type_die (type, context_die, usage);
25329
25330 if (need_pop)
25331 pop_decl_scope ();
25332
25333 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25334 it up if it is ever completed. gen_*_type_die will set it for us
25335 when appropriate. */
25336 }
25337
25338 /* Generate a type description DIE. */
25339
25340 static void
25341 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25342 enum debug_info_usage usage)
25343 {
25344 struct array_descr_info info;
25345
25346 if (type == NULL_TREE || type == error_mark_node)
25347 return;
25348
25349 if (flag_checking && type)
25350 verify_type (type);
25351
25352 if (TYPE_NAME (type) != NULL_TREE
25353 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25354 && is_redundant_typedef (TYPE_NAME (type))
25355 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25356 /* The DECL of this type is a typedef we don't want to emit debug
25357 info for but we want debug info for its underlying typedef.
25358 This can happen for e.g, the injected-class-name of a C++
25359 type. */
25360 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25361
25362 /* If TYPE is a typedef type variant, let's generate debug info
25363 for the parent typedef which TYPE is a type of. */
25364 if (typedef_variant_p (type))
25365 {
25366 if (TREE_ASM_WRITTEN (type))
25367 return;
25368
25369 tree name = TYPE_NAME (type);
25370 tree origin = decl_ultimate_origin (name);
25371 if (origin != NULL && origin != name)
25372 {
25373 gen_decl_die (origin, NULL, NULL, context_die);
25374 return;
25375 }
25376
25377 /* Prevent broken recursion; we can't hand off to the same type. */
25378 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25379
25380 /* Give typedefs the right scope. */
25381 context_die = scope_die_for (type, context_die);
25382
25383 TREE_ASM_WRITTEN (type) = 1;
25384
25385 gen_decl_die (name, NULL, NULL, context_die);
25386 return;
25387 }
25388
25389 /* If type is an anonymous tagged type named by a typedef, let's
25390 generate debug info for the typedef. */
25391 if (is_naming_typedef_decl (TYPE_NAME (type)))
25392 {
25393 /* Use the DIE of the containing namespace as the parent DIE of
25394 the type description DIE we want to generate. */
25395 if (DECL_CONTEXT (TYPE_NAME (type))
25396 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
25397 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
25398
25399 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25400 return;
25401 }
25402
25403 if (lang_hooks.types.get_debug_type)
25404 {
25405 tree debug_type = lang_hooks.types.get_debug_type (type);
25406
25407 if (debug_type != NULL_TREE && debug_type != type)
25408 {
25409 gen_type_die_with_usage (debug_type, context_die, usage);
25410 return;
25411 }
25412 }
25413
25414 /* We are going to output a DIE to represent the unqualified version
25415 of this type (i.e. without any const or volatile qualifiers) so
25416 get the main variant (i.e. the unqualified version) of this type
25417 now. (Vectors and arrays are special because the debugging info is in the
25418 cloned type itself. Similarly function/method types can contain extra
25419 ref-qualification). */
25420 if (TREE_CODE (type) == FUNCTION_TYPE
25421 || TREE_CODE (type) == METHOD_TYPE)
25422 {
25423 /* For function/method types, can't use type_main_variant here,
25424 because that can have different ref-qualifiers for C++,
25425 but try to canonicalize. */
25426 tree main = TYPE_MAIN_VARIANT (type);
25427 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25428 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25429 && check_base_type (t, main)
25430 && check_lang_type (t, type))
25431 {
25432 type = t;
25433 break;
25434 }
25435 }
25436 else if (TREE_CODE (type) != VECTOR_TYPE
25437 && TREE_CODE (type) != ARRAY_TYPE)
25438 type = type_main_variant (type);
25439
25440 /* If this is an array type with hidden descriptor, handle it first. */
25441 if (!TREE_ASM_WRITTEN (type)
25442 && lang_hooks.types.get_array_descr_info)
25443 {
25444 memset (&info, 0, sizeof (info));
25445 if (lang_hooks.types.get_array_descr_info (type, &info))
25446 {
25447 /* Fortran sometimes emits array types with no dimension. */
25448 gcc_assert (info.ndimensions >= 0
25449 && (info.ndimensions
25450 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25451 gen_descr_array_type_die (type, &info, context_die);
25452 TREE_ASM_WRITTEN (type) = 1;
25453 return;
25454 }
25455 }
25456
25457 if (TREE_ASM_WRITTEN (type))
25458 {
25459 /* Variable-length types may be incomplete even if
25460 TREE_ASM_WRITTEN. For such types, fall through to
25461 gen_array_type_die() and possibly fill in
25462 DW_AT_{upper,lower}_bound attributes. */
25463 if ((TREE_CODE (type) != ARRAY_TYPE
25464 && TREE_CODE (type) != RECORD_TYPE
25465 && TREE_CODE (type) != UNION_TYPE
25466 && TREE_CODE (type) != QUAL_UNION_TYPE)
25467 || !variably_modified_type_p (type, NULL))
25468 return;
25469 }
25470
25471 switch (TREE_CODE (type))
25472 {
25473 case ERROR_MARK:
25474 break;
25475
25476 case POINTER_TYPE:
25477 case REFERENCE_TYPE:
25478 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25479 ensures that the gen_type_die recursion will terminate even if the
25480 type is recursive. Recursive types are possible in Ada. */
25481 /* ??? We could perhaps do this for all types before the switch
25482 statement. */
25483 TREE_ASM_WRITTEN (type) = 1;
25484
25485 /* For these types, all that is required is that we output a DIE (or a
25486 set of DIEs) to represent the "basis" type. */
25487 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25488 DINFO_USAGE_IND_USE);
25489 break;
25490
25491 case OFFSET_TYPE:
25492 /* This code is used for C++ pointer-to-data-member types.
25493 Output a description of the relevant class type. */
25494 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25495 DINFO_USAGE_IND_USE);
25496
25497 /* Output a description of the type of the object pointed to. */
25498 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25499 DINFO_USAGE_IND_USE);
25500
25501 /* Now output a DIE to represent this pointer-to-data-member type
25502 itself. */
25503 gen_ptr_to_mbr_type_die (type, context_die);
25504 break;
25505
25506 case FUNCTION_TYPE:
25507 /* Force out return type (in case it wasn't forced out already). */
25508 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25509 DINFO_USAGE_DIR_USE);
25510 gen_subroutine_type_die (type, context_die);
25511 break;
25512
25513 case METHOD_TYPE:
25514 /* Force out return type (in case it wasn't forced out already). */
25515 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25516 DINFO_USAGE_DIR_USE);
25517 gen_subroutine_type_die (type, context_die);
25518 break;
25519
25520 case ARRAY_TYPE:
25521 case VECTOR_TYPE:
25522 gen_array_type_die (type, context_die);
25523 break;
25524
25525 case ENUMERAL_TYPE:
25526 case RECORD_TYPE:
25527 case UNION_TYPE:
25528 case QUAL_UNION_TYPE:
25529 gen_tagged_type_die (type, context_die, usage);
25530 return;
25531
25532 case VOID_TYPE:
25533 case INTEGER_TYPE:
25534 case REAL_TYPE:
25535 case FIXED_POINT_TYPE:
25536 case COMPLEX_TYPE:
25537 case BOOLEAN_TYPE:
25538 case POINTER_BOUNDS_TYPE:
25539 /* No DIEs needed for fundamental types. */
25540 break;
25541
25542 case NULLPTR_TYPE:
25543 case LANG_TYPE:
25544 /* Just use DW_TAG_unspecified_type. */
25545 {
25546 dw_die_ref type_die = lookup_type_die (type);
25547 if (type_die == NULL)
25548 {
25549 tree name = TYPE_IDENTIFIER (type);
25550 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25551 type);
25552 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25553 equate_type_number_to_die (type, type_die);
25554 }
25555 }
25556 break;
25557
25558 default:
25559 if (is_cxx_auto (type))
25560 {
25561 tree name = TYPE_IDENTIFIER (type);
25562 dw_die_ref *die = (name == get_identifier ("auto")
25563 ? &auto_die : &decltype_auto_die);
25564 if (!*die)
25565 {
25566 *die = new_die (DW_TAG_unspecified_type,
25567 comp_unit_die (), NULL_TREE);
25568 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25569 }
25570 equate_type_number_to_die (type, *die);
25571 break;
25572 }
25573 gcc_unreachable ();
25574 }
25575
25576 TREE_ASM_WRITTEN (type) = 1;
25577 }
25578
25579 static void
25580 gen_type_die (tree type, dw_die_ref context_die)
25581 {
25582 if (type != error_mark_node)
25583 {
25584 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25585 if (flag_checking)
25586 {
25587 dw_die_ref die = lookup_type_die (type);
25588 if (die)
25589 check_die (die);
25590 }
25591 }
25592 }
25593
25594 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25595 things which are local to the given block. */
25596
25597 static void
25598 gen_block_die (tree stmt, dw_die_ref context_die)
25599 {
25600 int must_output_die = 0;
25601 bool inlined_func;
25602
25603 /* Ignore blocks that are NULL. */
25604 if (stmt == NULL_TREE)
25605 return;
25606
25607 inlined_func = inlined_function_outer_scope_p (stmt);
25608
25609 /* If the block is one fragment of a non-contiguous block, do not
25610 process the variables, since they will have been done by the
25611 origin block. Do process subblocks. */
25612 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25613 {
25614 tree sub;
25615
25616 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25617 gen_block_die (sub, context_die);
25618
25619 return;
25620 }
25621
25622 /* Determine if we need to output any Dwarf DIEs at all to represent this
25623 block. */
25624 if (inlined_func)
25625 /* The outer scopes for inlinings *must* always be represented. We
25626 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25627 must_output_die = 1;
25628 else
25629 {
25630 /* Determine if this block directly contains any "significant"
25631 local declarations which we will need to output DIEs for. */
25632 if (debug_info_level > DINFO_LEVEL_TERSE)
25633 /* We are not in terse mode so *any* local declaration counts
25634 as being a "significant" one. */
25635 must_output_die = ((BLOCK_VARS (stmt) != NULL
25636 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25637 && (TREE_USED (stmt)
25638 || TREE_ASM_WRITTEN (stmt)
25639 || BLOCK_ABSTRACT (stmt)));
25640 else if ((TREE_USED (stmt)
25641 || TREE_ASM_WRITTEN (stmt)
25642 || BLOCK_ABSTRACT (stmt))
25643 && !dwarf2out_ignore_block (stmt))
25644 must_output_die = 1;
25645 }
25646
25647 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25648 DIE for any block which contains no significant local declarations at
25649 all. Rather, in such cases we just call `decls_for_scope' so that any
25650 needed Dwarf info for any sub-blocks will get properly generated. Note
25651 that in terse mode, our definition of what constitutes a "significant"
25652 local declaration gets restricted to include only inlined function
25653 instances and local (nested) function definitions. */
25654 if (must_output_die)
25655 {
25656 if (inlined_func)
25657 {
25658 /* If STMT block is abstract, that means we have been called
25659 indirectly from dwarf2out_abstract_function.
25660 	     That function rightfully marks the descendant blocks (of
25661 the abstract function it is dealing with) as being abstract,
25662 precisely to prevent us from emitting any
25663 	     DW_TAG_inlined_subroutine DIE as a descendant
25664 of an abstract function instance. So in that case, we should
25665 not call gen_inlined_subroutine_die.
25666
25667 Later though, when cgraph asks dwarf2out to emit info
25668 for the concrete instance of the function decl into which
25669 	     the concrete instance of STMT got inlined, the latter will lead
25670 to the generation of a DW_TAG_inlined_subroutine DIE. */
25671 if (! BLOCK_ABSTRACT (stmt))
25672 gen_inlined_subroutine_die (stmt, context_die);
25673 }
25674 else
25675 gen_lexical_block_die (stmt, context_die);
25676 }
25677 else
25678 decls_for_scope (stmt, context_die);
25679 }
25680
25681 /* Process variable DECL (or variable with origin ORIGIN) within
25682 block STMT and add it to CONTEXT_DIE. */
25683 static void
25684 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25685 {
25686 dw_die_ref die;
25687 tree decl_or_origin = decl ? decl : origin;
25688
25689 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25690 die = lookup_decl_die (decl_or_origin);
25691 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25692 {
25693 if (TYPE_DECL_IS_STUB (decl_or_origin))
25694 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25695 else
25696 die = lookup_decl_die (decl_or_origin);
25697 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25698 if (! die && ! early_dwarf)
25699 return;
25700 }
25701 else
25702 die = NULL;
25703
25704 /* Avoid creating DIEs for local typedefs and concrete static variables that
25705 will only be pruned later. */
25706 if ((origin || decl_ultimate_origin (decl))
25707 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25708 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25709 {
25710 origin = decl_ultimate_origin (decl_or_origin);
25711 if (decl && VAR_P (decl) && die != NULL)
25712 {
25713 die = lookup_decl_die (origin);
25714 if (die != NULL)
25715 equate_decl_number_to_die (decl, die);
25716 }
25717 return;
25718 }
25719
25720 if (die != NULL && die->die_parent == NULL)
25721 add_child_die (context_die, die);
25722 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25723 {
25724 if (early_dwarf)
25725 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25726 stmt, context_die);
25727 }
25728 else
25729 {
25730 if (decl && DECL_P (decl))
25731 {
25732 die = lookup_decl_die (decl);
25733
25734 /* Early-created DIEs do not have a parent, as the decls refer
25735 to the function as DECL_CONTEXT rather than to the BLOCK. */
25736 if (die && die->die_parent == NULL)
25737 {
25738 gcc_assert (in_lto_p);
25739 add_child_die (context_die, die);
25740 }
25741 }
25742
25743 gen_decl_die (decl, origin, NULL, context_die);
25744 }
25745 }
25746
25747 /* Generate all of the decls declared within a given scope and (recursively)
25748 all of its sub-blocks. */
25749
25750 static void
25751 decls_for_scope (tree stmt, dw_die_ref context_die)
25752 {
25753 tree decl;
25754 unsigned int i;
25755 tree subblocks;
25756
25757 /* Ignore NULL blocks. */
25758 if (stmt == NULL_TREE)
25759 return;
25760
25761 /* Output the DIEs to represent all of the data objects and typedefs
25762 declared directly within this block but not within any nested
25763 sub-blocks. Also, nested function and tag DIEs have been
25764 generated with a parent of NULL; fix that up now. We don't
25765 have to do this if we're at -g1. */
25766 if (debug_info_level > DINFO_LEVEL_TERSE)
25767 {
25768 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25769 process_scope_var (stmt, decl, NULL_TREE, context_die);
25770 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25771 origin - avoid doing this twice as we have no good way to see
25772 if we've done it once already. */
25773 if (! early_dwarf)
25774 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25775 {
25776 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25777 if (decl == current_function_decl)
25778 /* Ignore declarations of the current function: although they
25779 are declarations, gen_subprogram_die would treat them
25780 as definitions again, because they are equal to
25781 current_function_decl, and endlessly recurse. */;
25782 else if (TREE_CODE (decl) == FUNCTION_DECL)
25783 process_scope_var (stmt, decl, NULL_TREE, context_die);
25784 else
25785 process_scope_var (stmt, NULL_TREE, decl, context_die);
25786 }
25787 }
25788
25789 /* Even if we're at -g1, we need to process the subblocks in order to get
25790 inlined call information. */
25791
25792 /* Output the DIEs to represent all sub-blocks (and the items declared
25793 therein) of this block. */
25794 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25795 subblocks != NULL;
25796 subblocks = BLOCK_CHAIN (subblocks))
25797 gen_block_die (subblocks, context_die);
25798 }
25799
25800 /* Is this a typedef we can avoid emitting? */
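/* For example (illustrative): for a C++ class

     struct point { int x, y; };

   the front end creates an artificial member TYPE_DECL named "point"
   inside the class; emitting a DW_TAG_typedef for it would merely
   duplicate the name already carried by the DW_TAG_structure_type,
   so such typedefs are treated as redundant below.  */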
25801
25802 bool
25803 is_redundant_typedef (const_tree decl)
25804 {
25805 if (TYPE_DECL_IS_STUB (decl))
25806 return true;
25807
25808 if (DECL_ARTIFICIAL (decl)
25809 && DECL_CONTEXT (decl)
25810 && is_tagged_type (DECL_CONTEXT (decl))
25811 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25812 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25813 /* Also ignore the artificial member typedef for the class name. */
25814 return true;
25815
25816 return false;
25817 }
25818
25819 /* Return TRUE if DECL is a typedef that names a type for linkage
25820 purposes. Such typedefs are produced by the C++ FE for
25821 constructs like:
25822
25823 typedef struct {...} foo;
25824
25825 In that case, there is no typedef variant type produced for foo.
25826 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25827 struct type. */
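/* By contrast (illustrative), for

     typedef struct tag { int i; } foo;

   the struct already has a name for linkage purposes ("tag"), so "foo"
   is an ordinary typedef with a DECL_ORIGINAL_TYPE and is not a naming
   typedef in the sense checked below.  */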
25828
25829 static bool
25830 is_naming_typedef_decl (const_tree decl)
25831 {
25832 if (decl == NULL_TREE
25833 || TREE_CODE (decl) != TYPE_DECL
25834 || DECL_NAMELESS (decl)
25835 || !is_tagged_type (TREE_TYPE (decl))
25836 || DECL_IS_BUILTIN (decl)
25837 || is_redundant_typedef (decl)
25838 /* It looks like Ada produces TYPE_DECLs that are very similar
25839 to C++ naming typedefs but that have different
25840 semantics. Let's be specific to C++ for now. */
25841 || !is_cxx (decl))
25842 return FALSE;
25843
25844 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25845 && TYPE_NAME (TREE_TYPE (decl)) == decl
25846 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25847 != TYPE_NAME (TREE_TYPE (decl))));
25848 }
25849
25850 /* Looks up the DIE for a context. */
25851
25852 static inline dw_die_ref
25853 lookup_context_die (tree context)
25854 {
25855 if (context)
25856 {
25857 /* Find die that represents this context. */
25858 if (TYPE_P (context))
25859 {
25860 context = TYPE_MAIN_VARIANT (context);
25861 dw_die_ref ctx = lookup_type_die (context);
25862 if (!ctx)
25863 return NULL;
25864 return strip_naming_typedef (context, ctx);
25865 }
25866 else
25867 return lookup_decl_die (context);
25868 }
25869 return comp_unit_die ();
25870 }
25871
25872 /* Returns the DIE for a context. */
25873
25874 static inline dw_die_ref
25875 get_context_die (tree context)
25876 {
25877 if (context)
25878 {
25879 /* Find die that represents this context. */
25880 if (TYPE_P (context))
25881 {
25882 context = TYPE_MAIN_VARIANT (context);
25883 return strip_naming_typedef (context, force_type_die (context));
25884 }
25885 else
25886 return force_decl_die (context);
25887 }
25888 return comp_unit_die ();
25889 }
25890
25891 /* Returns the DIE for decl. A DIE will always be returned. */
25892
25893 static dw_die_ref
25894 force_decl_die (tree decl)
25895 {
25896 dw_die_ref decl_die;
25897 unsigned saved_external_flag;
25898 tree save_fn = NULL_TREE;
25899 decl_die = lookup_decl_die (decl);
25900 if (!decl_die)
25901 {
25902 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25903
25904 decl_die = lookup_decl_die (decl);
25905 if (decl_die)
25906 return decl_die;
25907
25908 switch (TREE_CODE (decl))
25909 {
25910 case FUNCTION_DECL:
25911 /* Clear current_function_decl, so that gen_subprogram_die thinks
25912 that this is a declaration. At this point, we just want to force
25913 a declaration DIE. */
25914 save_fn = current_function_decl;
25915 current_function_decl = NULL_TREE;
25916 gen_subprogram_die (decl, context_die);
25917 current_function_decl = save_fn;
25918 break;
25919
25920 case VAR_DECL:
25921 /* Set the external flag to force a declaration DIE. Restore it after
25922 the call to gen_decl_die. */
25923 saved_external_flag = DECL_EXTERNAL (decl);
25924 DECL_EXTERNAL (decl) = 1;
25925 gen_decl_die (decl, NULL, NULL, context_die);
25926 DECL_EXTERNAL (decl) = saved_external_flag;
25927 break;
25928
25929 case NAMESPACE_DECL:
25930 if (dwarf_version >= 3 || !dwarf_strict)
25931 dwarf2out_decl (decl);
25932 else
25933 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25934 decl_die = comp_unit_die ();
25935 break;
25936
25937 case TRANSLATION_UNIT_DECL:
25938 decl_die = comp_unit_die ();
25939 break;
25940
25941 default:
25942 gcc_unreachable ();
25943 }
25944
25945 /* We should be able to find the DIE now. */
25946 if (!decl_die)
25947 decl_die = lookup_decl_die (decl);
25948 gcc_assert (decl_die);
25949 }
25950
25951 return decl_die;
25952 }
25953
25954 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
25955 always returned. */
25956
25957 static dw_die_ref
25958 force_type_die (tree type)
25959 {
25960 dw_die_ref type_die;
25961
25962 type_die = lookup_type_die (type);
25963 if (!type_die)
25964 {
25965 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25966
25967 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25968 false, context_die);
25969 gcc_assert (type_die);
25970 }
25971 return type_die;
25972 }
25973
25974 /* Force out any required namespaces to be able to output DECL,
25975 and return the new context_die for it, if it's changed. */
25976
25977 static dw_die_ref
25978 setup_namespace_context (tree thing, dw_die_ref context_die)
25979 {
25980 tree context = (DECL_P (thing)
25981 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25982 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25983 /* Force out the namespace. */
25984 context_die = force_decl_die (context);
25985
25986 return context_die;
25987 }
25988
25989 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
25990 type) within its namespace, if appropriate.
25991
25992 For compatibility with older debuggers, namespace DIEs only contain
25993 declarations; all definitions are emitted at CU scope, with
25994 DW_AT_specification pointing to the declaration (like with class
25995 members). */
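   A sketch of the effect (illustrative): for

     namespace N { extern int v; }
     int N::v = 1;

   the DW_TAG_namespace DIE for N only receives a declaration DIE for
   "v"; the defining DIE is emitted at CU scope and refers back to it
   via DW_AT_specification, mirroring how class members are handled.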
25996
25997 static dw_die_ref
25998 declare_in_namespace (tree thing, dw_die_ref context_die)
25999 {
26000 dw_die_ref ns_context;
26001
26002 if (debug_info_level <= DINFO_LEVEL_TERSE)
26003 return context_die;
26004
26005 /* External declarations in the local scope only need to be emitted
26006 once, not once in the namespace and once in the scope.
26007
26008 This avoids declaring the `extern' below in the
26009 namespace DIE as well as in the innermost scope:
26010
26011 namespace S
26012 {
26013 int i=5;
26014 int foo()
26015 {
26016 int i=8;
26017 extern int i;
26018 return i;
26019 }
26020 }
26021 */
26022 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26023 return context_die;
26024
26025 /* If this decl is from an inlined function, then don't try to emit it in its
26026 namespace, as we will get confused. It would have already been emitted
26027 when the abstract instance of the inline function was emitted anyway. */
26028 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26029 return context_die;
26030
26031 ns_context = setup_namespace_context (thing, context_die);
26032
26033 if (ns_context != context_die)
26034 {
26035 if (is_fortran ())
26036 return ns_context;
26037 if (DECL_P (thing))
26038 gen_decl_die (thing, NULL, NULL, ns_context);
26039 else
26040 gen_type_die (thing, ns_context);
26041 }
26042 return context_die;
26043 }
26044
26045 /* Generate a DIE for a namespace or namespace alias. */
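/* For instance (illustrative):

     namespace util { ... }     -> DW_TAG_namespace (or DW_TAG_module for
                                    a Fortran module)
     namespace u = util;        -> DW_TAG_imported_declaration whose
                                    DW_AT_import refers to the DIE of the
                                    aliased namespace.  */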
26046
26047 static void
26048 gen_namespace_die (tree decl, dw_die_ref context_die)
26049 {
26050 dw_die_ref namespace_die;
26051
26052 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26053 they are an alias of. */
26054 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26055 {
26056 /* Output a real namespace or module. */
26057 context_die = setup_namespace_context (decl, comp_unit_die ());
26058 namespace_die = new_die (is_fortran ()
26059 ? DW_TAG_module : DW_TAG_namespace,
26060 context_die, decl);
26061 /* For Fortran modules defined in a different CU, don't add src coords. */
26062 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26063 {
26064 const char *name = dwarf2_name (decl, 0);
26065 if (name)
26066 add_name_attribute (namespace_die, name);
26067 }
26068 else
26069 add_name_and_src_coords_attributes (namespace_die, decl);
26070 if (DECL_EXTERNAL (decl))
26071 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26072 equate_decl_number_to_die (decl, namespace_die);
26073 }
26074 else
26075 {
26076 /* Output a namespace alias. */
26077
26078 /* Force out the namespace we are an alias of, if necessary. */
26079 dw_die_ref origin_die
26080 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26081
26082 if (DECL_FILE_SCOPE_P (decl)
26083 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26084 context_die = setup_namespace_context (decl, comp_unit_die ());
26085 /* Now create the namespace alias DIE. */
26086 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26087 add_name_and_src_coords_attributes (namespace_die, decl);
26088 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26089 equate_decl_number_to_die (decl, namespace_die);
26090 }
26091 if ((dwarf_version >= 5 || !dwarf_strict)
26092 && lang_hooks.decls.decl_dwarf_attribute (decl,
26093 DW_AT_export_symbols) == 1)
26094 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26095
26096 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26097 if (want_pubnames ())
26098 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26099 }
26100
26101 /* Generate Dwarf debug information for a decl described by DECL.
26102 The return value is currently only meaningful for PARM_DECLs;
26103 for all other decls it returns NULL.
26104
26105 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26106 It can be NULL otherwise. */
26107
26108 static dw_die_ref
26109 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26110 dw_die_ref context_die)
26111 {
26112 tree decl_or_origin = decl ? decl : origin;
26113 tree class_origin = NULL, ultimate_origin;
26114
26115 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26116 return NULL;
26117
26118 /* Ignore pointer bounds decls. */
26119 if (DECL_P (decl_or_origin)
26120 && TREE_TYPE (decl_or_origin)
26121 && POINTER_BOUNDS_P (decl_or_origin))
26122 return NULL;
26123
26124 switch (TREE_CODE (decl_or_origin))
26125 {
26126 case ERROR_MARK:
26127 break;
26128
26129 case CONST_DECL:
26130 if (!is_fortran () && !is_ada ())
26131 {
26132 /* The individual enumerators of an enum type get output when we output
26133 the Dwarf representation of the relevant enum type itself. */
26134 break;
26135 }
26136
26137 /* Emit its type. */
26138 gen_type_die (TREE_TYPE (decl), context_die);
26139
26140 /* And its containing namespace. */
26141 context_die = declare_in_namespace (decl, context_die);
26142
26143 gen_const_die (decl, context_die);
26144 break;
26145
26146 case FUNCTION_DECL:
26147 #if 0
26148 /* FIXME */
26149 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26150 on local redeclarations of global functions. That seems broken. */
26151 if (current_function_decl != decl)
26152 /* This is only a declaration. */;
26153 #endif
26154
26155 /* We should have abstract copies already and should not generate
26156 stray type DIEs in late LTO dumping. */
26157 if (! early_dwarf)
26158 ;
26159
26160 /* If we're emitting a clone, emit info for the abstract instance. */
26161 else if (origin || DECL_ORIGIN (decl) != decl)
26162 dwarf2out_abstract_function (origin
26163 ? DECL_ORIGIN (origin)
26164 : DECL_ABSTRACT_ORIGIN (decl));
26165
26166 /* If we're emitting a possibly inlined function, emit it as an
26167 abstract instance. */
26168 else if (cgraph_function_possibly_inlined_p (decl)
26169 && ! DECL_ABSTRACT_P (decl)
26170 && ! class_or_namespace_scope_p (context_die)
26171 /* dwarf2out_abstract_function won't emit a die if this is just
26172 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26173 that case, because that works only if we have a die. */
26174 && DECL_INITIAL (decl) != NULL_TREE)
26175 dwarf2out_abstract_function (decl);
26176
26177 /* Otherwise we're emitting the primary DIE for this decl. */
26178 else if (debug_info_level > DINFO_LEVEL_TERSE)
26179 {
26180 /* Before we describe the FUNCTION_DECL itself, make sure that we
26181 have its containing type. */
26182 if (!origin)
26183 origin = decl_class_context (decl);
26184 if (origin != NULL_TREE)
26185 gen_type_die (origin, context_die);
26186
26187 /* And its return type. */
26188 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26189
26190 /* And its virtual context. */
26191 if (DECL_VINDEX (decl) != NULL_TREE)
26192 gen_type_die (DECL_CONTEXT (decl), context_die);
26193
26194 /* Make sure we have a member DIE for decl. */
26195 if (origin != NULL_TREE)
26196 gen_type_die_for_member (origin, decl, context_die);
26197
26198 /* And its containing namespace. */
26199 context_die = declare_in_namespace (decl, context_die);
26200 }
26201
26202 /* Now output a DIE to represent the function itself. */
26203 if (decl)
26204 gen_subprogram_die (decl, context_die);
26205 break;
26206
26207 case TYPE_DECL:
26208 /* If we are in terse mode, don't generate any DIEs to represent any
26209 actual typedefs. */
26210 if (debug_info_level <= DINFO_LEVEL_TERSE)
26211 break;
26212
26213 /* In the special case of a TYPE_DECL node representing the declaration
26214 of some type tag, if the given TYPE_DECL is marked as having been
26215 instantiated from some other (original) TYPE_DECL node (e.g. one which
26216 was generated within the original definition of an inline function) we
26217 used to generate a special (abbreviated) DW_TAG_structure_type,
26218 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26219 should actually be referencing those DIEs, as variable DIEs with that
26220 type would already be emitted in the abstract origin, so it was always
26221 removed during unused type pruning. Don't add anything in this
26222 case. */
26223 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26224 break;
26225
26226 if (is_redundant_typedef (decl))
26227 gen_type_die (TREE_TYPE (decl), context_die);
26228 else
26229 /* Output a DIE to represent the typedef itself. */
26230 gen_typedef_die (decl, context_die);
26231 break;
26232
26233 case LABEL_DECL:
26234 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26235 gen_label_die (decl, context_die);
26236 break;
26237
26238 case VAR_DECL:
26239 case RESULT_DECL:
26240 /* If we are in terse mode, don't generate any DIEs to represent any
26241 variable declarations or definitions. */
26242 if (debug_info_level <= DINFO_LEVEL_TERSE)
26243 break;
26244
26245 /* Avoid generating stray type DIEs during late dwarf dumping.
26246 All types have been dumped early. */
26247 if (early_dwarf
26248 /* ??? But in LTRANS we cannot annotate early created variably
26249 modified type DIEs without copying them and adjusting all
26250 references to them. Dump them again as happens for inlining
26251 which copies both the decl and the types. */
26252 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26253 in VLA bound information for example. */
26254 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26255 current_function_decl)))
26256 {
26257 /* Output any DIEs that are needed to specify the type of this data
26258 object. */
26259 if (decl_by_reference_p (decl_or_origin))
26260 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26261 else
26262 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26263 }
26264
26265 if (early_dwarf)
26266 {
26267 /* And its containing type. */
26268 class_origin = decl_class_context (decl_or_origin);
26269 if (class_origin != NULL_TREE)
26270 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26271
26272 /* And its containing namespace. */
26273 context_die = declare_in_namespace (decl_or_origin, context_die);
26274 }
26275
26276 /* Now output the DIE to represent the data object itself. This gets
26277 complicated because of the possibility that the VAR_DECL really
26278 represents an inlined instance of a formal parameter for an inline
26279 function. */
26280 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26281 if (ultimate_origin != NULL_TREE
26282 && TREE_CODE (ultimate_origin) == PARM_DECL)
26283 gen_formal_parameter_die (decl, origin,
26284 true /* Emit name attribute. */,
26285 context_die);
26286 else
26287 gen_variable_die (decl, origin, context_die);
26288 break;
26289
26290 case FIELD_DECL:
26291 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26292 /* Ignore the nameless fields that are used to skip bits but handle C++
26293 anonymous unions and structs. */
26294 if (DECL_NAME (decl) != NULL_TREE
26295 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26296 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26297 {
26298 gen_type_die (member_declared_type (decl), context_die);
26299 gen_field_die (decl, ctx, context_die);
26300 }
26301 break;
26302
26303 case PARM_DECL:
26304 /* Avoid generating stray type DIEs during late dwarf dumping.
26305 All types have been dumped early. */
26306 if (early_dwarf
26307 /* ??? But in LTRANS we cannot annotate early created variably
26308 modified type DIEs without copying them and adjusting all
26309 references to them. Dump them again as happens for inlining
26310 which copies both the decl and the types. */
26311 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26312 in VLA bound information for example. */
26313 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26314 current_function_decl)))
26315 {
26316 if (DECL_BY_REFERENCE (decl_or_origin))
26317 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26318 else
26319 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26320 }
26321 return gen_formal_parameter_die (decl, origin,
26322 true /* Emit name attribute. */,
26323 context_die);
26324
26325 case NAMESPACE_DECL:
26326 if (dwarf_version >= 3 || !dwarf_strict)
26327 gen_namespace_die (decl, context_die);
26328 break;
26329
26330 case IMPORTED_DECL:
26331 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26332 DECL_CONTEXT (decl), context_die);
26333 break;
26334
26335 case NAMELIST_DECL:
26336 gen_namelist_decl (DECL_NAME (decl), context_die,
26337 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26338 break;
26339
26340 default:
26341 /* Probably some frontend-internal decl. Assume we don't care. */
26342 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26343 break;
26344 }
26345
26346 return NULL;
26347 }
26348 \f
26349 /* Output initial debug information for global DECL. Called at the
26350 end of the parsing process.
26351
26352 This is the initial debug generation process. As such, the DIEs
26353 generated may be incomplete. A later debug generation pass
26354 (dwarf2out_late_global_decl) will augment the information generated
26355 in this pass (e.g., with complete location info). */
26356
26357 static void
26358 dwarf2out_early_global_decl (tree decl)
26359 {
26360 set_early_dwarf s;
26361
26362 /* gen_decl_die() will set DECL_ABSTRACT because
26363 cgraph_function_possibly_inlined_p() returns true. This in
26364 turn will cause DW_AT_inline attributes to be set.
26365
26366 This happens because at early dwarf generation, there is no
26367 cgraph information, causing cgraph_function_possibly_inlined_p()
26368 to return true. Trick cgraph_function_possibly_inlined_p()
26369 while we generate dwarf early. */
26370 bool save = symtab->global_info_ready;
26371 symtab->global_info_ready = true;
26372
26373 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26374 other DECLs and they can point to template types or other things
26375 that dwarf2out can't handle when done via dwarf2out_decl. */
26376 if (TREE_CODE (decl) != TYPE_DECL
26377 && TREE_CODE (decl) != PARM_DECL)
26378 {
26379 if (TREE_CODE (decl) == FUNCTION_DECL)
26380 {
26381 tree save_fndecl = current_function_decl;
26382
26383 /* For nested functions, make sure we have DIEs for the parents first
26384 so that all nested DIEs are generated at the proper scope in the
26385 first shot. */
26386 tree context = decl_function_context (decl);
26387 if (context != NULL)
26388 {
26389 dw_die_ref context_die = lookup_decl_die (context);
26390 current_function_decl = context;
26391
26392 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26393 enough so that it lands in its own context. This avoids type
26394 pruning issues later on. */
26395 if (context_die == NULL || is_declaration_die (context_die))
26396 dwarf2out_decl (context);
26397 }
26398
26399 /* Emit an abstract origin of a function first. This happens
26400 with C++ constructor clones, for example, and keeps
26401 dwarf2out_abstract_function happy, which requires the early
26402 DIE of the abstract instance to be present. */
26403 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26404 dw_die_ref origin_die;
26405 if (origin != NULL
26406 /* Do not emit the DIE multiple times but make sure to
26407 process it fully here in case we just saw a declaration. */
26408 && ((origin_die = lookup_decl_die (origin)) == NULL
26409 || is_declaration_die (origin_die)))
26410 {
26411 current_function_decl = origin;
26412 dwarf2out_decl (origin);
26413 }
26414
26415 /* Emit the DIE for decl but avoid doing that multiple times. */
26416 dw_die_ref old_die;
26417 if ((old_die = lookup_decl_die (decl)) == NULL
26418 || is_declaration_die (old_die))
26419 {
26420 current_function_decl = decl;
26421 dwarf2out_decl (decl);
26422 }
26423
26424 current_function_decl = save_fndecl;
26425 }
26426 else
26427 dwarf2out_decl (decl);
26428 }
26429 symtab->global_info_ready = save;
26430 }
26431
26432 /* Return whether EXPR is an expression with the following pattern:
26433 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
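/* Such expressions typically come from a DECL_VALUE_EXPR of the form
   (illustrative)

     *(volatile int *) 0x40021018

   i.e. a dereference of a compile-time constant address, which needs no
   relocation against text symbols.  */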
26434
26435 static bool
26436 is_trivial_indirect_ref (tree expr)
26437 {
26438 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26439 return false;
26440
26441 tree nop = TREE_OPERAND (expr, 0);
26442 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26443 return false;
26444
26445 tree int_cst = TREE_OPERAND (nop, 0);
26446 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26447 }
26448
26449 /* Output debug information for global decl DECL. Called from
26450 toplev.c after compilation proper has finished. */
26451
26452 static void
26453 dwarf2out_late_global_decl (tree decl)
26454 {
26455 /* Fill in any location information we were unable to determine
26456 on the first pass. */
26457 if (VAR_P (decl) && !POINTER_BOUNDS_P (decl))
26458 {
26459 dw_die_ref die = lookup_decl_die (decl);
26460
26461 /* We may have to generate early debug late for LTO in case debug
26462 was not enabled at compile-time or the target doesn't support
26463 the LTO early debug scheme. */
26464 if (! die && in_lto_p)
26465 {
26466 dwarf2out_decl (decl);
26467 die = lookup_decl_die (decl);
26468 }
26469
26470 if (die)
26471 {
26472 /* We get called via the symtab code invoking late_global_decl
26473 for symbols that are optimized out.
26474
26475 Do not add locations for those, except if they have a
26476 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26477 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26478 INDIRECT_REF expression, as this could generate relocations to
26479 text symbols in LTO object files, which is invalid. */
26480 varpool_node *node = varpool_node::get (decl);
26481 if ((! node || ! node->definition)
26482 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26483 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26484 tree_add_const_value_attribute_for_decl (die, decl);
26485 else
26486 add_location_or_const_value_attribute (die, decl, false);
26487 }
26488 }
26489 }
26490
26491 /* Output debug information for type decl DECL. Called from toplev.c
26492 and from language front ends (to record built-in types). */
26493 static void
26494 dwarf2out_type_decl (tree decl, int local)
26495 {
26496 if (!local)
26497 {
26498 set_early_dwarf s;
26499 dwarf2out_decl (decl);
26500 }
26501 }
26502
26503 /* Output debug information for imported module or decl DECL.
26504 NAME is the non-NULL name in the lexical block if the decl has been renamed.
26505 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26506 that DECL belongs to.
26507 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26508 static void
26509 dwarf2out_imported_module_or_decl_1 (tree decl,
26510 tree name,
26511 tree lexical_block,
26512 dw_die_ref lexical_block_die)
26513 {
26514 expanded_location xloc;
26515 dw_die_ref imported_die = NULL;
26516 dw_die_ref at_import_die;
26517
26518 if (TREE_CODE (decl) == IMPORTED_DECL)
26519 {
26520 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26521 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26522 gcc_assert (decl);
26523 }
26524 else
26525 xloc = expand_location (input_location);
26526
26527 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26528 {
26529 at_import_die = force_type_die (TREE_TYPE (decl));
26530 /* For namespace N { typedef void T; } using N::T; base_type_die
26531 returns NULL, but DW_TAG_imported_declaration requires
26532 a DW_AT_import attribute. Force creation of a DW_TAG_typedef. */
26533 if (!at_import_die)
26534 {
26535 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26536 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26537 at_import_die = lookup_type_die (TREE_TYPE (decl));
26538 gcc_assert (at_import_die);
26539 }
26540 }
26541 else
26542 {
26543 at_import_die = lookup_decl_die (decl);
26544 if (!at_import_die)
26545 {
26546 /* If we're trying to avoid duplicate debug info, we may not have
26547 emitted the member decl for this field. Emit it now. */
26548 if (TREE_CODE (decl) == FIELD_DECL)
26549 {
26550 tree type = DECL_CONTEXT (decl);
26551
26552 if (TYPE_CONTEXT (type)
26553 && TYPE_P (TYPE_CONTEXT (type))
26554 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26555 DINFO_USAGE_DIR_USE))
26556 return;
26557 gen_type_die_for_member (type, decl,
26558 get_context_die (TYPE_CONTEXT (type)));
26559 }
26560 if (TREE_CODE (decl) == NAMELIST_DECL)
26561 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26562 get_context_die (DECL_CONTEXT (decl)),
26563 NULL_TREE);
26564 else
26565 at_import_die = force_decl_die (decl);
26566 }
26567 }
26568
26569 if (TREE_CODE (decl) == NAMESPACE_DECL)
26570 {
26571 if (dwarf_version >= 3 || !dwarf_strict)
26572 imported_die = new_die (DW_TAG_imported_module,
26573 lexical_block_die,
26574 lexical_block);
26575 else
26576 return;
26577 }
26578 else
26579 imported_die = new_die (DW_TAG_imported_declaration,
26580 lexical_block_die,
26581 lexical_block);
26582
26583 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26584 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26585 if (debug_column_info && xloc.column)
26586 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26587 if (name)
26588 add_AT_string (imported_die, DW_AT_name,
26589 IDENTIFIER_POINTER (name));
26590 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26591 }
26592
26593 /* Output debug information for imported module or decl DECL.
26594 NAME is the non-NULL name in context if the decl has been renamed.
26595 CHILD is true if decl is one of the renamed decls as part of
26596 importing a whole module.
26597 IMPLICIT is set if this hook is called for an implicit import
26598 such as an inline namespace. */
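/* For example (illustrative):

     using namespace std;     -> DW_TAG_imported_module
     using std::string;       -> DW_TAG_imported_declaration

   Either DIE carries a DW_AT_import reference to the DIE of the
   imported namespace or declaration.  */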
26599
26600 static void
26601 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26602 bool child, bool implicit)
26603 {
26604 /* dw_die_ref at_import_die; */
26605 dw_die_ref scope_die;
26606
26607 if (debug_info_level <= DINFO_LEVEL_TERSE)
26608 return;
26609
26610 gcc_assert (decl);
26611
26612 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26613 should be enough. For DWARF4 and older, even if we emit
26614 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26615 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
26616 if (implicit
26617 && dwarf_version >= 5
26618 && lang_hooks.decls.decl_dwarf_attribute (decl,
26619 DW_AT_export_symbols) == 1)
26620 return;
26621
26622 set_early_dwarf s;
26623
26624 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26625 two DIEs: the decl DIE for reference and the scope DIE. First, get the
26626 DIE for the decl itself. */
26627
26628 /* Get the scope DIE for the decl context. Use comp_unit_die for a global
26629 module or decl. If no DIE is found for non-globals, force a new DIE. */
26630 if (context
26631 && TYPE_P (context)
26632 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26633 return;
26634
26635 scope_die = get_context_die (context);
26636
26637 if (child)
26638 {
26639 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26640 there is nothing we can do here. */
26641 if (dwarf_version < 3 && dwarf_strict)
26642 return;
26643
26644 gcc_assert (scope_die->die_child);
26645 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26646 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26647 scope_die = scope_die->die_child;
26648 }
26649
26650 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26651 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26652 }
26653
26654 /* Output debug information for namelists. */
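/* A sketch of the output (illustrative), for Fortran source such as

     integer :: a, b
     namelist /nml/ a, b

   a DW_TAG_namelist DIE named "nml" is created with one
   DW_TAG_namelist_item child per member, each holding a
   DW_AT_namelist_items reference to that member's DIE.  */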
26655
26656 static dw_die_ref
26657 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26658 {
26659 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26660 tree value;
26661 unsigned i;
26662
26663 if (debug_info_level <= DINFO_LEVEL_TERSE)
26664 return NULL;
26665
26666 gcc_assert (scope_die != NULL);
26667 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26668 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26669
26670 /* If there are no item_decls, we have a nondefining namelist, e.g.
26671 with USE association; hence, set DW_AT_declaration. */
26672 if (item_decls == NULL_TREE)
26673 {
26674 add_AT_flag (nml_die, DW_AT_declaration, 1);
26675 return nml_die;
26676 }
26677
26678 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26679 {
26680 nml_item_ref_die = lookup_decl_die (value);
26681 if (!nml_item_ref_die)
26682 nml_item_ref_die = force_decl_die (value);
26683
26684 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26685 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26686 }
26687 return nml_die;
26688 }
26689
26690
26691 /* Write the debugging output for DECL. */
26692
26693 static void
26694 dwarf2out_decl (tree decl)
26695 {
26696 dw_die_ref context_die = comp_unit_die ();
26697
26698 switch (TREE_CODE (decl))
26699 {
26700 case ERROR_MARK:
26701 return;
26702
26703 case FUNCTION_DECL:
26704 /* If we're a nested function, initially use a parent of NULL; if we're
26705 a plain function, this will be fixed up in decls_for_scope. If
26706 we're a method, it will be ignored, since we already have a DIE. */
26707 if (decl_function_context (decl)
26708 /* But if we're in terse mode, we don't care about scope. */
26709 && debug_info_level > DINFO_LEVEL_TERSE)
26710 context_die = NULL;
26711 break;
26712
26713 case VAR_DECL:
26714 /* For local statics, look up the proper context DIE. */
26715 if (local_function_static (decl))
26716 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26717
26718 /* If we are in terse mode, don't generate any DIEs to represent any
26719 variable declarations or definitions. */
26720 if (debug_info_level <= DINFO_LEVEL_TERSE)
26721 return;
26722 break;
26723
26724 case CONST_DECL:
26725 if (debug_info_level <= DINFO_LEVEL_TERSE)
26726 return;
26727 if (!is_fortran () && !is_ada ())
26728 return;
26729 if (TREE_STATIC (decl) && decl_function_context (decl))
26730 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26731 break;
26732
26733 case NAMESPACE_DECL:
26734 case IMPORTED_DECL:
26735 if (debug_info_level <= DINFO_LEVEL_TERSE)
26736 return;
26737 if (lookup_decl_die (decl) != NULL)
26738 return;
26739 break;
26740
26741 case TYPE_DECL:
26742 /* Don't emit stubs for types unless they are needed by other DIEs. */
26743 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26744 return;
26745
26746 /* Don't bother trying to generate any DIEs to represent any of the
26747 normal built-in types for the language we are compiling. */
26748 if (DECL_IS_BUILTIN (decl))
26749 return;
26750
26751 /* If we are in terse mode, don't generate any DIEs for types. */
26752 if (debug_info_level <= DINFO_LEVEL_TERSE)
26753 return;
26754
26755 /* If we're a function-scope tag, initially use a parent of NULL;
26756 this will be fixed up in decls_for_scope. */
26757 if (decl_function_context (decl))
26758 context_die = NULL;
26759
26760 break;
26761
26762 case NAMELIST_DECL:
26763 break;
26764
26765 default:
26766 return;
26767 }
26768
26769 gen_decl_die (decl, NULL, NULL, context_die);
26770
26771 if (flag_checking)
26772 {
26773 dw_die_ref die = lookup_decl_die (decl);
26774 if (die)
26775 check_die (die);
26776 }
26777 }
26778
26779 /* Write the debugging output for DECL. */
26780
26781 static void
26782 dwarf2out_function_decl (tree decl)
26783 {
26784 dwarf2out_decl (decl);
26785 call_arg_locations = NULL;
26786 call_arg_loc_last = NULL;
26787 call_site_count = -1;
26788 tail_call_site_count = -1;
26789 decl_loc_table->empty ();
26790 cached_dw_loc_list_table->empty ();
26791 }
26792
26793 /* Output a marker (i.e. a label) for the beginning of the generated code for
26794 a lexical block. */
26795
26796 static void
26797 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26798 unsigned int blocknum)
26799 {
26800 switch_to_section (current_function_section ());
26801 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26802 }
26803
26804 /* Output a marker (i.e. a label) for the end of the generated code for a
26805 lexical block. */
26806
26807 static void
26808 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26809 {
26810 switch_to_section (current_function_section ());
26811 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26812 }
26813
26814 /* Returns nonzero if it is appropriate not to emit any debugging
26815 information for BLOCK, because it doesn't contain any instructions.
26816
26817 Don't allow this for blocks with nested functions or local classes
26818 as we would end up with orphans, and in the presence of scheduling
26819 we may end up calling them anyway. */
26820
26821 static bool
26822 dwarf2out_ignore_block (const_tree block)
26823 {
26824 tree decl;
26825 unsigned int i;
26826
26827 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26828 if (TREE_CODE (decl) == FUNCTION_DECL
26829 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26830 return 0;
26831 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26832 {
26833 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26834 if (TREE_CODE (decl) == FUNCTION_DECL
26835 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26836 return 0;
26837 }
26838
26839 return 1;
26840 }
26841
26842 /* Hash table routines for file_hash. */
26843
26844 bool
26845 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26846 {
26847 return filename_cmp (p1->filename, p2) == 0;
26848 }
26849
26850 hashval_t
26851 dwarf_file_hasher::hash (dwarf_file_data *p)
26852 {
26853 return htab_hash_string (p->filename);
26854 }
26855
26856 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26857 dwarf2out.c) and return its "index". The index of each (known) filename is
26858 just a unique number which is associated with only that one filename. We
26859 need such numbers for the sake of generating labels (in the .debug_sfnames
26860 section) and references to those files numbers (in the .debug_srcinfo
26861 and .debug_macinfo sections). If the filename given as an argument is not
26862 found in our current list, add it to the list and assign it the next
26863 available unique index number. */
26864
26865 static struct dwarf_file_data *
26866 lookup_filename (const char *file_name)
26867 {
26868 struct dwarf_file_data * created;
26869
26870 if (!file_name)
26871 return NULL;
26872
26873 dwarf_file_data **slot
26874 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26875 INSERT);
26876 if (*slot)
26877 return *slot;
26878
26879 created = ggc_alloc<dwarf_file_data> ();
26880 created->filename = file_name;
26881 created->emitted_number = 0;
26882 *slot = created;
26883 return created;
26884 }
26885
26886 /* If the assembler will construct the file table, then translate the compiler
26887 internal file table number into the assembler file table number, and emit
26888 a .file directive if we haven't already emitted one yet. The file table
26889 numbers are different because we prune debug info for unused variables and
26890 types, which may include filenames. */
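/* For example (illustrative), the first reference to a new file might
   produce

	.file 2 "lib/widget.c"

   in the assembler output, and 2 is then returned for this and every
   later call with the same dwarf_file_data.  */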
26891
26892 static int
26893 maybe_emit_file (struct dwarf_file_data * fd)
26894 {
26895 if (! fd->emitted_number)
26896 {
26897 if (last_emitted_file)
26898 fd->emitted_number = last_emitted_file->emitted_number + 1;
26899 else
26900 fd->emitted_number = 1;
26901 last_emitted_file = fd;
26902
26903 if (output_asm_line_debug_info ())
26904 {
26905 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26906 output_quoted_string (asm_out_file,
26907 remap_debug_filename (fd->filename));
26908 fputc ('\n', asm_out_file);
26909 }
26910 }
26911
26912 return fd->emitted_number;
26913 }
26914
26915 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26916 That generation should happen after function debug info has been
26917 generated. The value of the attribute is the constant value of ARG. */
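/* For a C++ instantiation such as (illustrative)

     template <int N> struct buf { char d[N]; };
     buf<16> b;

   the template value parameter DIE for N is recorded here so that
   DW_AT_const_value 16 can be attached to it once function debug info
   exists (see gen_remaining_tmpl_value_param_die_attribute).  */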
26918
26919 static void
26920 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26921 {
26922 die_arg_entry entry;
26923
26924 if (!die || !arg)
26925 return;
26926
26927 gcc_assert (early_dwarf);
26928
26929 if (!tmpl_value_parm_die_table)
26930 vec_alloc (tmpl_value_parm_die_table, 32);
26931
26932 entry.die = die;
26933 entry.arg = arg;
26934 vec_safe_push (tmpl_value_parm_die_table, entry);
26935 }
26936
26937 /* Return TRUE if T is an instance of a generic type, FALSE
26938 otherwise. */
26939
26940 static bool
26941 generic_type_p (tree t)
26942 {
26943 if (t == NULL_TREE || !TYPE_P (t))
26944 return false;
26945 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26946 }
26947
26948 /* Schedule the generation of the generic parameter dies for the
26949 instance of generic type T. The proper generation itself is later
26950 done by gen_scheduled_generic_parms_dies. */
26951
26952 static void
26953 schedule_generic_params_dies_gen (tree t)
26954 {
26955 if (!generic_type_p (t))
26956 return;
26957
26958 gcc_assert (early_dwarf);
26959
26960 if (!generic_type_instances)
26961 vec_alloc (generic_type_instances, 256);
26962
26963 vec_safe_push (generic_type_instances, t);
26964 }
26965
26966 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26967 by append_entry_to_tmpl_value_parm_die_table. This function must
26968 be called after function DIEs have been generated. */
26969
26970 static void
26971 gen_remaining_tmpl_value_param_die_attribute (void)
26972 {
26973 if (tmpl_value_parm_die_table)
26974 {
26975 unsigned i, j;
26976 die_arg_entry *e;
26977
26978 /* We do this in two phases - first get the cases we can
26979 handle during early-finish, preserving those we cannot
26980 (containing symbolic constants where we don't yet know
26981 whether we are going to output the referenced symbols).
26982 For those we try again at late-finish. */
26983 j = 0;
26984 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
26985 {
26986 if (!e->die->removed
26987 && !tree_add_const_value_attribute (e->die, e->arg))
26988 {
26989 dw_loc_descr_ref loc = NULL;
26990 if (! early_dwarf
26991 && (dwarf_version >= 5 || !dwarf_strict))
26992 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
26993 if (loc)
26994 add_AT_loc (e->die, DW_AT_location, loc);
26995 else
26996 (*tmpl_value_parm_die_table)[j++] = *e;
26997 }
26998 }
26999 tmpl_value_parm_die_table->truncate (j);
27000 }
27001 }
27002
27003 /* Generate generic parameters DIEs for instances of generic types
27004 that have been previously scheduled by
27005 schedule_generic_params_dies_gen. This function must be called
27006 after all the types of the CU have been laid out. */
27007
27008 static void
27009 gen_scheduled_generic_parms_dies (void)
27010 {
27011 unsigned i;
27012 tree t;
27013
27014 if (!generic_type_instances)
27015 return;
27016
27017 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27018 if (COMPLETE_TYPE_P (t))
27019 gen_generic_params_dies (t);
27020
27021 generic_type_instances = NULL;
27022 }
27023
27024
27025 /* Replace the DW_AT_name attribute for the decl with NAME. */
27026
27027 static void
27028 dwarf2out_set_name (tree decl, tree name)
27029 {
27030 dw_die_ref die;
27031 dw_attr_node *attr;
27032 const char *dname;
27033
27034 die = TYPE_SYMTAB_DIE (decl);
27035 if (!die)
27036 return;
27037
27038 dname = dwarf2_name (name, 0);
27039 if (!dname)
27040 return;
27041
27042 attr = get_AT (die, DW_AT_name);
27043 if (attr)
27044 {
27045 struct indirect_string_node *node;
27046
27047 node = find_AT_string (dname);
27048 /* Replace the string. */
27049 attr->dw_attr_val.v.val_str = node;
27050 }
27051
27052 else
27053 add_name_attribute (die, dname);
27054 }
27055
27056 /* True if before or during processing of the first function being emitted. */
27057 static bool in_first_function_p = true;
27058 /* True if loc_note during dwarf2out_var_location call might still be
27059 before first real instruction at address equal to .Ltext0. */
27060 static bool maybe_at_text_label_p = true;
27061 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27062 static unsigned int first_loclabel_num_not_at_text_label;
27063
27064 /* Look ahead for a real insn, or for a begin stmt marker. */
27065
27066 static rtx_insn *
27067 dwarf2out_next_real_insn (rtx_insn *loc_note)
27068 {
27069 rtx_insn *next_real = NEXT_INSN (loc_note);
27070
27071 while (next_real)
27072 if (INSN_P (next_real))
27073 break;
27074 else
27075 next_real = NEXT_INSN (next_real);
27076
27077 return next_real;
27078 }
27079
27080 /* Called by the final INSN scan whenever we see a var location. We
27081 use it to drop labels in the right places, and throw the location in
27082 our lookup table. */
27083
27084 static void
27085 dwarf2out_var_location (rtx_insn *loc_note)
27086 {
27087 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27088 struct var_loc_node *newloc;
27089 rtx_insn *next_real, *next_note;
27090 rtx_insn *call_insn = NULL;
27091 static const char *last_label;
27092 static const char *last_postcall_label;
27093 static bool last_in_cold_section_p;
27094 static rtx_insn *expected_next_loc_note;
27095 tree decl;
27096 bool var_loc_p;
27097 var_loc_view view = 0;
27098
27099 if (!NOTE_P (loc_note))
27100 {
27101 if (CALL_P (loc_note))
27102 {
27103 maybe_reset_location_view (loc_note, cur_line_info_table);
27104 call_site_count++;
27105 if (SIBLING_CALL_P (loc_note))
27106 tail_call_site_count++;
27107 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27108 {
27109 call_insn = loc_note;
27110 loc_note = NULL;
27111 var_loc_p = false;
27112
27113 next_real = dwarf2out_next_real_insn (call_insn);
27114 next_note = NULL;
27115 cached_next_real_insn = NULL;
27116 goto create_label;
27117 }
27118 if (optimize == 0 && !flag_var_tracking)
27119 {
27120 /* When the var-tracking pass is not running, there is no note
27121 for indirect calls whose target is compile-time known. In this
27122 case, process such calls specifically so that we generate call
27123 sites for them anyway. */
27124 rtx x = PATTERN (loc_note);
27125 if (GET_CODE (x) == PARALLEL)
27126 x = XVECEXP (x, 0, 0);
27127 if (GET_CODE (x) == SET)
27128 x = SET_SRC (x);
27129 if (GET_CODE (x) == CALL)
27130 x = XEXP (x, 0);
27131 if (!MEM_P (x)
27132 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27133 || !SYMBOL_REF_DECL (XEXP (x, 0))
27134 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27135 != FUNCTION_DECL))
27136 {
27137 call_insn = loc_note;
27138 loc_note = NULL;
27139 var_loc_p = false;
27140
27141 next_real = dwarf2out_next_real_insn (call_insn);
27142 next_note = NULL;
27143 cached_next_real_insn = NULL;
27144 goto create_label;
27145 }
27146 }
27147 }
27148 else if (!debug_variable_location_views)
27149 gcc_unreachable ();
27150 else
27151 maybe_reset_location_view (loc_note, cur_line_info_table);
27152
27153 return;
27154 }
27155
27156 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27157 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27158 return;
27159
27160 /* Optimize processing a large consecutive sequence of location
27161 notes so we don't spend too much time in next_real_insn. If the
27162 next insn is another location note, remember the next_real_insn
27163 calculation for next time. */
27164 next_real = cached_next_real_insn;
27165 if (next_real)
27166 {
27167 if (expected_next_loc_note != loc_note)
27168 next_real = NULL;
27169 }
27170
27171 next_note = NEXT_INSN (loc_note);
27172 if (! next_note
27173 || next_note->deleted ()
27174 || ! NOTE_P (next_note)
27175 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27176 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27177 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27178 next_note = NULL;
27179
27180 if (! next_real)
27181 next_real = dwarf2out_next_real_insn (loc_note);
27182
27183 if (next_note)
27184 {
27185 expected_next_loc_note = next_note;
27186 cached_next_real_insn = next_real;
27187 }
27188 else
27189 cached_next_real_insn = NULL;
27190
27191 /* If there are no instructions which would be affected by this note,
27192 don't do anything. */
27193 if (var_loc_p
27194 && next_real == NULL_RTX
27195 && !NOTE_DURING_CALL_P (loc_note))
27196 return;
27197
27198 create_label:
27199
27200 if (next_real == NULL_RTX)
27201 next_real = get_last_insn ();
27202
27203 /* If there were any real insns between the note we processed last time
27204 and this note (or if it is the first note), clear
27205 last_{,postcall_}label so that they are not reused this time. */
27206 if (last_var_location_insn == NULL_RTX
27207 || last_var_location_insn != next_real
27208 || last_in_cold_section_p != in_cold_section_p)
27209 {
27210 last_label = NULL;
27211 last_postcall_label = NULL;
27212 }
27213
27214 if (var_loc_p)
27215 {
27216 const char *label
27217 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27218 view = cur_line_info_table->view;
27219 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27220 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27221 if (newloc == NULL)
27222 return;
27223 }
27224 else
27225 {
27226 decl = NULL_TREE;
27227 newloc = NULL;
27228 }
27229
27230 /* If there were no real insns between the note we processed last time
27231 and this note, use the label we emitted last time. Otherwise
27232 create a new label and emit it. */
27233 if (last_label == NULL)
27234 {
27235 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27236 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27237 loclabel_num++;
27238 last_label = ggc_strdup (loclabel);
27239 /* See if loclabel might be equal to .Ltext0. If yes,
27240 bump first_loclabel_num_not_at_text_label. */
27241 if (!have_multiple_function_sections
27242 && in_first_function_p
27243 && maybe_at_text_label_p)
27244 {
27245 static rtx_insn *last_start;
27246 rtx_insn *insn;
27247 for (insn = loc_note; insn; insn = previous_insn (insn))
27248 if (insn == last_start)
27249 break;
27250 else if (!NONDEBUG_INSN_P (insn))
27251 continue;
27252 else
27253 {
27254 rtx body = PATTERN (insn);
27255 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27256 continue;
27257 /* Inline asm could occupy zero bytes. */
27258 else if (GET_CODE (body) == ASM_INPUT
27259 || asm_noperands (body) >= 0)
27260 continue;
27261 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27262 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27263 continue;
27264 #endif
27265 else
27266 {
27267 /* Assume insn has non-zero length. */
27268 maybe_at_text_label_p = false;
27269 break;
27270 }
27271 }
27272 if (maybe_at_text_label_p)
27273 {
27274 last_start = loc_note;
27275 first_loclabel_num_not_at_text_label = loclabel_num;
27276 }
27277 }
27278 }
27279
27280 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27281 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27282
27283 if (!var_loc_p)
27284 {
27285 struct call_arg_loc_node *ca_loc
27286 = ggc_cleared_alloc<call_arg_loc_node> ();
27287 rtx_insn *prev = call_insn;
27288
27289 ca_loc->call_arg_loc_note
27290 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27291 ca_loc->next = NULL;
27292 ca_loc->label = last_label;
27293 gcc_assert (prev
27294 && (CALL_P (prev)
27295 || (NONJUMP_INSN_P (prev)
27296 && GET_CODE (PATTERN (prev)) == SEQUENCE
27297 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27298 if (!CALL_P (prev))
27299 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27300 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27301
27302 /* Look for a SYMBOL_REF in the "prev" instruction. */
27303 rtx x = get_call_rtx_from (PATTERN (prev));
27304 if (x)
27305 {
27306 /* Try to get the call symbol, if any. */
27307 if (MEM_P (XEXP (x, 0)))
27308 x = XEXP (x, 0);
27309 /* First, look for a memory access to a symbol_ref. */
27310 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27311 && SYMBOL_REF_DECL (XEXP (x, 0))
27312 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27313 ca_loc->symbol_ref = XEXP (x, 0);
27314 /* Otherwise, look at a compile-time known user-level function
27315 declaration. */
27316 else if (MEM_P (x)
27317 && MEM_EXPR (x)
27318 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27319 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27320 }
27321
27322 ca_loc->block = insn_scope (prev);
27323 if (call_arg_locations)
27324 call_arg_loc_last->next = ca_loc;
27325 else
27326 call_arg_locations = ca_loc;
27327 call_arg_loc_last = ca_loc;
27328 }
27329 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27330 {
27331 newloc->label = last_label;
27332 newloc->view = view;
27333 }
27334 else
27335 {
27336 if (!last_postcall_label)
27337 {
27338 sprintf (loclabel, "%s-1", last_label);
27339 last_postcall_label = ggc_strdup (loclabel);
27340 }
27341 newloc->label = last_postcall_label;
27342 /* ??? This view is at last_label, not last_label-1, but we
27343 could only assume view at last_label-1 is zero if we could
27344 assume calls always have length greater than one. This is
27345 probably true in general, though there might be a rare
27346 exception to this rule, e.g. if a call insn is optimized out
27347 by target magic. Then, even the -1 in the label will be
27348 wrong, which might invalidate the range. Anyway, using view,
27349 though technically possibly incorrect, will work as far as
27350 ranges go: since L-1 is in the middle of the call insn,
27351 (L-1).0 and (L-1).V shouldn't make any difference, and having
27352 the loclist entry refer to the .loc entry might be useful, so
27353 leave it like this. */
27354 newloc->view = view;
27355 }
27356
27357 if (var_loc_p && flag_debug_asm)
27358 {
27359 const char *name, *sep, *patstr;
27360 if (decl && DECL_NAME (decl))
27361 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27362 else
27363 name = "";
27364 if (NOTE_VAR_LOCATION_LOC (loc_note))
27365 {
27366 sep = " => ";
27367 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27368 }
27369 else
27370 {
27371 sep = " ";
27372 patstr = "RESET";
27373 }
27374 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27375 name, sep, patstr);
27376 }
27377
27378 last_var_location_insn = next_real;
27379 last_in_cold_section_p = in_cold_section_p;
27380 }
27381
27382 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27383 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27384 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27385 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27386 BLOCK_FRAGMENT_ORIGIN links. */
27387 static bool
27388 block_within_block_p (tree block, tree outer, bool bothways)
27389 {
27390 if (block == outer)
27391 return true;
27392
27393 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27394 for (tree context = BLOCK_SUPERCONTEXT (block);
27395 context != outer;
27396 context = BLOCK_SUPERCONTEXT (context))
27397 if (!context || TREE_CODE (context) != BLOCK)
27398 return false;
27399
27400 if (!bothways)
27401 return true;
27402
27403 /* Now check that each block is actually referenced by its
27404 parent. */
27405 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27406 context = BLOCK_SUPERCONTEXT (context))
27407 {
27408 if (BLOCK_FRAGMENT_ORIGIN (context))
27409 {
27410 gcc_assert (!BLOCK_SUBBLOCKS (context));
27411 context = BLOCK_FRAGMENT_ORIGIN (context);
27412 }
27413 for (tree sub = BLOCK_SUBBLOCKS (context);
27414 sub != block;
27415 sub = BLOCK_CHAIN (sub))
27416 if (!sub)
27417 return false;
27418 if (context == outer)
27419 return true;
27420 else
27421 block = context;
27422 }
27423 }
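
/* Note on the BLOCK_FRAGMENT_ORIGIN handling above: a scope block that was
   split into discontiguous fragments has no BLOCK_SUBBLOCKS of its own; its
   BLOCK_FRAGMENT_ORIGIN points back at the original block, so the walk
   canonicalizes each fragment to its origin before searching the parent's
   sub-block chain for BLOCK.  */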
27424
27425 /* Called during final while assembling the marker of the entry point
27426 for an inlined function. */
27427
27428 static void
27429 dwarf2out_inline_entry (tree block)
27430 {
27431 gcc_assert (debug_inline_points);
27432
27433 /* If we can't represent it, don't bother. */
27434 if (!(dwarf_version >= 3 || !dwarf_strict))
27435 return;
27436
27437 gcc_assert (DECL_P (block_ultimate_origin (block)));
27438
27439 /* Sanity check the block tree. This would catch a case in which
27440 BLOCK got removed from the tree reachable from the outermost
27441 lexical block, but got retained in markers. It would still link
27442 back to its parents, but some ancestor would be missing a link
27443 down the path to the sub BLOCK. If the block got removed, its
27444 BLOCK_NUMBER will not be a usable value. */
27445 if (flag_checking)
27446 gcc_assert (block_within_block_p (block,
27447 DECL_INITIAL (current_function_decl),
27448 true));
27449
27450 gcc_assert (inlined_function_outer_scope_p (block));
27451 gcc_assert (!BLOCK_DIE (block));
27452
27453 if (BLOCK_FRAGMENT_ORIGIN (block))
27454 block = BLOCK_FRAGMENT_ORIGIN (block);
27455 /* Can the entry point ever not be at the beginning of an
27456 unfragmented lexical block? */
27457 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27458 || (cur_line_info_table
27459 && !ZERO_VIEW_P (cur_line_info_table->view))))
27460 return;
27461
27462 if (!inline_entry_data_table)
27463 inline_entry_data_table
27464 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27465
27466
27467 inline_entry_data **iedp
27468 = inline_entry_data_table->find_slot_with_hash (block,
27469 htab_hash_pointer (block),
27470 INSERT);
27471 if (*iedp)
27472 /* ??? Ideally, we'd record all entry points for the same inlined
27473 function (some may have been duplicated by e.g. unrolling), but
27474 we have no way to represent that ATM. */
27475 return;
27476
27477 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27478 ied->block = block;
27479 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27480 ied->label_num = BLOCK_NUMBER (block);
27481 if (cur_line_info_table)
27482 ied->view = cur_line_info_table->view;
27483
27484 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27485
27486 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27487 BLOCK_NUMBER (block));
27488 ASM_OUTPUT_LABEL (asm_out_file, label);
27489 }
27490
27491 /* Called from finalize_size_functions for size functions so that their body
27492 can be encoded in the debug info to describe the layout of variable-length
27493 structures. */
27494
27495 static void
27496 dwarf2out_size_function (tree decl)
27497 {
27498 function_to_dwarf_procedure (decl);
27499 }
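
/* As an illustration: for a type whose size is self-referential, e.g. an
   Ada discriminated record whose array component length is given by one of
   the record's own fields, stor-layout builds a size function.  Turning
   that function into a DW_TAG_dwarf_procedure here lets the size be
   expressed as a DWARF expression that calls the procedure, so a debugger
   can compute the size of a particular object at run time.  */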
27500
27501 /* Note in one location list that the text section has changed. */
27502
27503 int
27504 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27505 {
27506 var_loc_list *list = *slot;
27507 if (list->first)
27508 list->last_before_switch
27509 = list->last->next ? list->last->next : list->last;
27510 return 1;
27511 }
27512
27513 /* Note in all location lists that the text section has changed. */
27514
27515 static void
27516 var_location_switch_text_section (void)
27517 {
27518 if (decl_loc_table == NULL)
27519 return;
27520
27521 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27522 }
27523
27524 /* Create a new line number table. */
27525
27526 static dw_line_info_table *
27527 new_line_info_table (void)
27528 {
27529 dw_line_info_table *table;
27530
27531 table = ggc_cleared_alloc<dw_line_info_table> ();
27532 table->file_num = 1;
27533 table->line_num = 1;
27534 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27535 FORCE_RESET_NEXT_VIEW (table->view);
27536 table->symviews_since_reset = 0;
27537
27538 return table;
27539 }
27540
27541 /* Look up the "current" table into which we emit line info, so
27542 that we don't have to do it for every source line. */
27543
27544 static void
27545 set_cur_line_info_table (section *sec)
27546 {
27547 dw_line_info_table *table;
27548
27549 if (sec == text_section)
27550 table = text_section_line_info;
27551 else if (sec == cold_text_section)
27552 {
27553 table = cold_text_section_line_info;
27554 if (!table)
27555 {
27556 cold_text_section_line_info = table = new_line_info_table ();
27557 table->end_label = cold_end_label;
27558 }
27559 }
27560 else
27561 {
27562 const char *end_label;
27563
27564 if (crtl->has_bb_partition)
27565 {
27566 if (in_cold_section_p)
27567 end_label = crtl->subsections.cold_section_end_label;
27568 else
27569 end_label = crtl->subsections.hot_section_end_label;
27570 }
27571 else
27572 {
27573 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27574 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27575 current_function_funcdef_no);
27576 end_label = ggc_strdup (label);
27577 }
27578
27579 table = new_line_info_table ();
27580 table->end_label = end_label;
27581
27582 vec_safe_push (separate_line_info, table);
27583 }
27584
27585 if (output_asm_line_debug_info ())
27586 table->is_stmt = (cur_line_info_table
27587 ? cur_line_info_table->is_stmt
27588 : DWARF_LINE_DEFAULT_IS_STMT_START);
27589 cur_line_info_table = table;
27590 }
27591
27592
27593 /* We need to reset the locations at the beginning of each
27594 function. We can't do this in the end_function hook, because the
27595 declarations that use the locations won't have been output when
27596 that hook is called. Also compute have_multiple_function_sections here. */
27597
27598 static void
27599 dwarf2out_begin_function (tree fun)
27600 {
27601 section *sec = function_section (fun);
27602
27603 if (sec != text_section)
27604 have_multiple_function_sections = true;
27605
27606 if (crtl->has_bb_partition && !cold_text_section)
27607 {
27608 gcc_assert (current_function_decl == fun);
27609 cold_text_section = unlikely_text_section ();
27610 switch_to_section (cold_text_section);
27611 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27612 switch_to_section (sec);
27613 }
27614
27615 dwarf2out_note_section_used ();
27616 call_site_count = 0;
27617 tail_call_site_count = 0;
27618
27619 set_cur_line_info_table (sec);
27620 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27621 }
27622
27623 /* Helper function of dwarf2out_end_function, called only after emitting
27624 the very first function into assembly. Check if some .debug_loc range
27625 might end with a .LVL* label that could be equal to .Ltext0.
27626 In that case we must force using absolute addresses in .debug_loc ranges,
27627 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27628 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27629 list terminator.
27630 Set have_multiple_function_sections to true in that case and
27631 terminate htab traversal. */
27632
27633 int
27634 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27635 {
27636 var_loc_list *entry = *slot;
27637 struct var_loc_node *node;
27638
27639 node = entry->first;
27640 if (node && node->next && node->next->label)
27641 {
27642 unsigned int i;
27643 const char *label = node->next->label;
27644 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27645
27646 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27647 {
27648 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27649 if (strcmp (label, loclabel) == 0)
27650 {
27651 have_multiple_function_sections = true;
27652 return 0;
27653 }
27654 }
27655 }
27656 return 1;
27657 }
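
/* Concretely, with hypothetical label names: a .debug_loc range in this
   mode is emitted as a pair of text-relative addresses such as
   .LVL3-.Ltext0 .. .LVL4-.Ltext0.  If the very first .LVL label of the
   unit coincides with .Ltext0, an empty range degenerates to 0 .. 0,
   which is indistinguishable from the .debug_loc end-of-list marker;
   hence the switch to absolute addresses forced above.  */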
27658
27659 /* Hook called after emitting a function into assembly.
27660 This does something only for the very first function emitted. */
27661
27662 static void
27663 dwarf2out_end_function (unsigned int)
27664 {
27665 if (in_first_function_p
27666 && !have_multiple_function_sections
27667 && first_loclabel_num_not_at_text_label
27668 && decl_loc_table)
27669 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27670 in_first_function_p = false;
27671 maybe_at_text_label_p = false;
27672 }
27673
27674 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27675 front-ends register a translation unit even before dwarf2out_init is
27676 called. */
27677 static tree main_translation_unit = NULL_TREE;
27678
27679 /* Hook called by front-ends after they built their main translation unit.
27680 Associate comp_unit_die to UNIT. */
27681
27682 static void
27683 dwarf2out_register_main_translation_unit (tree unit)
27684 {
27685 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27686 && main_translation_unit == NULL_TREE);
27687 main_translation_unit = unit;
27688 /* If dwarf2out_init has not been called yet, it will perform the association
27689 itself looking at main_translation_unit. */
27690 if (decl_die_table != NULL)
27691 equate_decl_number_to_die (unit, comp_unit_die ());
27692 }
27693
27694 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27695
27696 static void
27697 push_dw_line_info_entry (dw_line_info_table *table,
27698 enum dw_line_info_opcode opcode, unsigned int val)
27699 {
27700 dw_line_info_entry e;
27701 e.opcode = opcode;
27702 e.val = val;
27703 vec_safe_push (table->entries, e);
27704 }
27705
27706 /* Output a label to mark the beginning of a source code line entry
27707 and record information relating to this source line, in
27708 'line_info_table' for later output of the .debug_line section. */
27709 /* ??? The discriminator parameter ought to be unsigned. */
27710
27711 static void
27712 dwarf2out_source_line (unsigned int line, unsigned int column,
27713 const char *filename,
27714 int discriminator, bool is_stmt)
27715 {
27716 unsigned int file_num;
27717 dw_line_info_table *table;
27718 static var_loc_view lvugid;
27719
27720 if (debug_info_level < DINFO_LEVEL_TERSE)
27721 return;
27722
27723 table = cur_line_info_table;
27724
27725 if (line == 0)
27726 {
27727 if (debug_variable_location_views
27728 && output_asm_line_debug_info ()
27729 && table && !RESETTING_VIEW_P (table->view))
27730 {
27731 /* If we're using the assembler to compute view numbers, we
27732 can't issue a .loc directive for line zero, so we can't
27733 get a view number at this point. We might attempt to
27734 compute it from the previous view, or equate it to a
27735 subsequent view (though it might not be there!), but
27736 since we're omitting the line number entry, we might as
27737 well omit the view number as well. That means pretending
27738 it's a view number zero, which might very well turn out
27739 to be correct. ??? Extend the assembler so that the
27740 compiler could emit e.g. ".locview .LVU#", to output a
27741 view without changing line number information. We'd then
27742 have to count it in symviews_since_reset; when it's omitted,
27743 it doesn't count. */
27744 if (!zero_view_p)
27745 zero_view_p = BITMAP_GGC_ALLOC ();
27746 bitmap_set_bit (zero_view_p, table->view);
27747 if (flag_debug_asm)
27748 {
27749 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27750 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27751 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27752 ASM_COMMENT_START);
27753 assemble_name (asm_out_file, label);
27754 putc ('\n', asm_out_file);
27755 }
27756 table->view = ++lvugid;
27757 }
27758 return;
27759 }
27760
27761 /* The discriminator column was added in DWARF 4. Simplify the code
27762 below by clearing it if we're not supposed to output it. */
27763 if (dwarf_version < 4 && dwarf_strict)
27764 discriminator = 0;
27765
27766 if (!debug_column_info)
27767 column = 0;
27768
27769 file_num = maybe_emit_file (lookup_filename (filename));
27770
27771 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27772 the debugger has used the second (possibly duplicate) line number
27773 at the beginning of the function to mark the end of the prologue.
27774 We could eliminate any other duplicates within the function. For
27775 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27776 that second line number entry. */
27777 /* Recall that this end-of-prologue indication is *not* the same thing
27778 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27779 to which the hook corresponds, follows the last insn that was
27780 emitted by gen_prologue. What we need is to precede the first insn
27781 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27782 insn that corresponds to something the user wrote. These may be
27783 very different locations once scheduling is enabled. */
27784
27785 if (0 && file_num == table->file_num
27786 && line == table->line_num
27787 && column == table->column_num
27788 && discriminator == table->discrim_num
27789 && is_stmt == table->is_stmt)
27790 return;
27791
27792 switch_to_section (current_function_section ());
27793
27794 /* If requested, emit something human-readable. */
27795 if (flag_debug_asm)
27796 {
27797 if (debug_column_info)
27798 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27799 filename, line, column);
27800 else
27801 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27802 filename, line);
27803 }
27804
27805 if (output_asm_line_debug_info ())
27806 {
27807 /* Emit the .loc directive understood by GNU as. */
27808 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27809 file_num, line, is_stmt, discriminator */
27810 fputs ("\t.loc ", asm_out_file);
27811 fprint_ul (asm_out_file, file_num);
27812 putc (' ', asm_out_file);
27813 fprint_ul (asm_out_file, line);
27814 putc (' ', asm_out_file);
27815 fprint_ul (asm_out_file, column);
27816
27817 if (is_stmt != table->is_stmt)
27818 {
27819 fputs (" is_stmt ", asm_out_file);
27820 putc (is_stmt ? '1' : '0', asm_out_file);
27821 }
27822 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27823 {
27824 gcc_assert (discriminator > 0);
27825 fputs (" discriminator ", asm_out_file);
27826 fprint_ul (asm_out_file, (unsigned long) discriminator);
27827 }
27828 if (debug_variable_location_views)
27829 {
27830 if (!RESETTING_VIEW_P (table->view))
27831 {
27832 table->symviews_since_reset++;
27833 if (table->symviews_since_reset > symview_upper_bound)
27834 symview_upper_bound = table->symviews_since_reset;
27835 /* When we're using the assembler to compute view
27836 numbers, we output symbolic labels after "view" in
27837 .loc directives, and the assembler will set them for
27838 us, so that we can refer to the view numbers in
27839 location lists. The only exceptions are when we know
27840 a view will be zero: "-0" is a forced reset, used
27841 e.g. in the beginning of functions, whereas "0" tells
27842 the assembler to check that there was a PC change
27843 since the previous view, in a way that implicitly
27844 resets the next view. */
27845 fputs (" view ", asm_out_file);
27846 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27847 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27848 assemble_name (asm_out_file, label);
27849 table->view = ++lvugid;
27850 }
27851 else
27852 {
27853 table->symviews_since_reset = 0;
27854 if (FORCE_RESETTING_VIEW_P (table->view))
27855 fputs (" view -0", asm_out_file);
27856 else
27857 fputs (" view 0", asm_out_file);
27858 /* Mark the present view as a zero view. Earlier debug
27859 binds may have already added its id to loclists to be
27860 emitted later, so we can't reuse the id for something
27861 else. However, it's good to know whether a view is
27862 known to be zero, because then we may be able to
27863 optimize out locviews that are all zeros, so take
27864 note of it in zero_view_p. */
27865 if (!zero_view_p)
27866 zero_view_p = BITMAP_GGC_ALLOC ();
27867 bitmap_set_bit (zero_view_p, lvugid);
27868 table->view = ++lvugid;
27869 }
27870 }
27871 putc ('\n', asm_out_file);
27872 }
27873 else
27874 {
27875 unsigned int label_num = ++line_info_label_num;
27876
27877 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27878
27879 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27880 push_dw_line_info_entry (table, LI_adv_address, label_num);
27881 else
27882 push_dw_line_info_entry (table, LI_set_address, label_num);
27883 if (debug_variable_location_views)
27884 {
27885 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27886 if (resetting)
27887 table->view = 0;
27888
27889 if (flag_debug_asm)
27890 fprintf (asm_out_file, "\t%s view %s%d\n",
27891 ASM_COMMENT_START,
27892 resetting ? "-" : "",
27893 table->view);
27894
27895 table->view++;
27896 }
27897 if (file_num != table->file_num)
27898 push_dw_line_info_entry (table, LI_set_file, file_num);
27899 if (discriminator != table->discrim_num)
27900 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27901 if (is_stmt != table->is_stmt)
27902 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27903 push_dw_line_info_entry (table, LI_set_line, line);
27904 if (debug_column_info)
27905 push_dw_line_info_entry (table, LI_set_column, column);
27906 }
27907
27908 table->file_num = file_num;
27909 table->line_num = line;
27910 table->column_num = column;
27911 table->discrim_num = discriminator;
27912 table->is_stmt = is_stmt;
27913 table->in_use = true;
27914 }
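
/* For illustration only (file, line, column and view numbers are
   hypothetical): with assembler-computed views, the directive emitted
   above for a non-statement location at line 42, column 7 of file 1
   could look like

       .loc 1 42 7 is_stmt 0 view .LVU5

   whereas without assembler support we instead emit an internal
   LINE_CODE_LABEL and record LI_set_address/LI_set_line entries for the
   later .debug_line output.  */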
27915
27916 /* Record the beginning of a new source file. */
27917
27918 static void
27919 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27920 {
27921 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27922 {
27923 macinfo_entry e;
27924 e.code = DW_MACINFO_start_file;
27925 e.lineno = lineno;
27926 e.info = ggc_strdup (filename);
27927 vec_safe_push (macinfo_table, e);
27928 }
27929 }
27930
27931 /* Record the end of a source file. */
27932
27933 static void
27934 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27935 {
27936 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27937 {
27938 macinfo_entry e;
27939 e.code = DW_MACINFO_end_file;
27940 e.lineno = lineno;
27941 e.info = NULL;
27942 vec_safe_push (macinfo_table, e);
27943 }
27944 }
27945
27946 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27947 the tail part of the directive line, i.e. the part which is past the
27948 initial whitespace, #, whitespace, directive-name, whitespace part. */
27949
27950 static void
27951 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27952 const char *buffer ATTRIBUTE_UNUSED)
27953 {
27954 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27955 {
27956 macinfo_entry e;
27957 /* Insert a dummy first entry to be able to optimize the whole
27958 predefined macro block using DW_MACRO_import. */
27959 if (macinfo_table->is_empty () && lineno <= 1)
27960 {
27961 e.code = 0;
27962 e.lineno = 0;
27963 e.info = NULL;
27964 vec_safe_push (macinfo_table, e);
27965 }
27966 e.code = DW_MACINFO_define;
27967 e.lineno = lineno;
27968 e.info = ggc_strdup (buffer);
27969 vec_safe_push (macinfo_table, e);
27970 }
27971 }
27972
27973 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
27974 the tail part of the directive line, i.e. the part which is past the
27975 initial whitespace, #, whitespace, directive-name, whitespace part. */
27976
27977 static void
27978 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
27979 const char *buffer ATTRIBUTE_UNUSED)
27980 {
27981 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27982 {
27983 macinfo_entry e;
27984 /* Insert a dummy first entry to be able to optimize the whole
27985 predefined macro block using DW_MACRO_import. */
27986 if (macinfo_table->is_empty () && lineno <= 1)
27987 {
27988 e.code = 0;
27989 e.lineno = 0;
27990 e.info = NULL;
27991 vec_safe_push (macinfo_table, e);
27992 }
27993 e.code = DW_MACINFO_undef;
27994 e.lineno = lineno;
27995 e.info = ggc_strdup (buffer);
27996 vec_safe_push (macinfo_table, e);
27997 }
27998 }
27999
28000 /* Helpers to manipulate the hash table of macinfo entries. */
28001
28002 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28003 {
28004 static inline hashval_t hash (const macinfo_entry *);
28005 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28006 };
28007
28008 inline hashval_t
28009 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28010 {
28011 return htab_hash_string (entry->info);
28012 }
28013
28014 inline bool
28015 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28016 const macinfo_entry *entry2)
28017 {
28018 return !strcmp (entry1->info, entry2->info);
28019 }
28020
28021 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28022
28023 /* Output a single .debug_macinfo entry. */
28024
28025 static void
28026 output_macinfo_op (macinfo_entry *ref)
28027 {
28028 int file_num;
28029 size_t len;
28030 struct indirect_string_node *node;
28031 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28032 struct dwarf_file_data *fd;
28033
28034 switch (ref->code)
28035 {
28036 case DW_MACINFO_start_file:
28037 fd = lookup_filename (ref->info);
28038 file_num = maybe_emit_file (fd);
28039 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28040 dw2_asm_output_data_uleb128 (ref->lineno,
28041 "Included from line number %lu",
28042 (unsigned long) ref->lineno);
28043 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28044 break;
28045 case DW_MACINFO_end_file:
28046 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28047 break;
28048 case DW_MACINFO_define:
28049 case DW_MACINFO_undef:
28050 len = strlen (ref->info) + 1;
28051 if (!dwarf_strict
28052 && len > DWARF_OFFSET_SIZE
28053 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28054 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28055 {
28056 ref->code = ref->code == DW_MACINFO_define
28057 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28058 output_macinfo_op (ref);
28059 return;
28060 }
28061 dw2_asm_output_data (1, ref->code,
28062 ref->code == DW_MACINFO_define
28063 ? "Define macro" : "Undefine macro");
28064 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28065 (unsigned long) ref->lineno);
28066 dw2_asm_output_nstring (ref->info, -1, "The macro");
28067 break;
28068 case DW_MACRO_define_strp:
28069 case DW_MACRO_undef_strp:
28070 node = find_AT_string (ref->info);
28071 gcc_assert (node
28072 && (node->form == DW_FORM_strp
28073 || node->form == DW_FORM_GNU_str_index));
28074 dw2_asm_output_data (1, ref->code,
28075 ref->code == DW_MACRO_define_strp
28076 ? "Define macro strp"
28077 : "Undefine macro strp");
28078 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28079 (unsigned long) ref->lineno);
28080 if (node->form == DW_FORM_strp)
28081 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28082 debug_str_section, "The macro: \"%s\"",
28083 ref->info);
28084 else
28085 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28086 ref->info);
28087 break;
28088 case DW_MACRO_import:
28089 dw2_asm_output_data (1, ref->code, "Import");
28090 ASM_GENERATE_INTERNAL_LABEL (label,
28091 DEBUG_MACRO_SECTION_LABEL,
28092 ref->lineno + macinfo_label_base);
28093 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28094 break;
28095 default:
28096 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28097 ASM_COMMENT_START, (unsigned long) ref->code);
28098 break;
28099 }
28100 }
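
/* As a rough, hypothetical example of the DW_MACINFO_define case above,
   a "#define FOO 1" recorded at line 3 comes out as

       .byte    0x1       DW_MACINFO_define
       .uleb128 0x3       At line number 3
       .asciz   "FOO 1"   The macro

   unless the string is long enough and a mergeable .debug_str section is
   available, in which case the op is rewritten to DW_MACRO_define_strp
   with an offset into .debug_str instead.  */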
28101
28102 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28103 other compilation units' .debug_macinfo sections. IDX is the index of
28104 the first define/undef op in the sequence. Emit a DW_MACRO_import entry
28105 referencing a comdat .debug_macinfo section and return the number of ops
28106 that should be emitted into it. If the define/undef entries should
28107 instead be emitted normally, return 0. */
28108
28109 static unsigned
28110 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28111 macinfo_hash_type **macinfo_htab)
28112 {
28113 macinfo_entry *first, *second, *cur, *inc;
28114 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28115 unsigned char checksum[16];
28116 struct md5_ctx ctx;
28117 char *grp_name, *tail;
28118 const char *base;
28119 unsigned int i, count, encoded_filename_len, linebuf_len;
28120 macinfo_entry **slot;
28121
28122 first = &(*macinfo_table)[idx];
28123 second = &(*macinfo_table)[idx + 1];
28124
28125 /* Optimize only if there are at least two consecutive define/undef ops,
28126 and either all of them are before first DW_MACINFO_start_file
28127 with lineno {0,1} (i.e. predefined macro block), or all of them are
28128 in some included header file. */
28129 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28130 return 0;
28131 if (vec_safe_is_empty (files))
28132 {
28133 if (first->lineno > 1 || second->lineno > 1)
28134 return 0;
28135 }
28136 else if (first->lineno == 0)
28137 return 0;
28138
28139 /* Find the last define/undef entry that can be grouped together
28140 with first and at the same time compute md5 checksum of their
28141 codes, linenumbers and strings. */
28142 md5_init_ctx (&ctx);
28143 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28144 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28145 break;
28146 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28147 break;
28148 else
28149 {
28150 unsigned char code = cur->code;
28151 md5_process_bytes (&code, 1, &ctx);
28152 checksum_uleb128 (cur->lineno, &ctx);
28153 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28154 }
28155 md5_finish_ctx (&ctx, checksum);
28156 count = i - idx;
28157
28158 /* From the containing include filename (if any) pick up just
28159 usable characters from its basename. */
28160 if (vec_safe_is_empty (files))
28161 base = "";
28162 else
28163 base = lbasename (files->last ().info);
28164 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28165 if (ISIDNUM (base[i]) || base[i] == '.')
28166 encoded_filename_len++;
28167 /* Count . at the end. */
28168 if (encoded_filename_len)
28169 encoded_filename_len++;
28170
28171 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28172 linebuf_len = strlen (linebuf);
28173
28174 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
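/* E.g. (hypothetically) "wm4.stdio.h.3." followed by the 32 hex digits
   of the md5 checksum computed above.  */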
28175 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28176 + 16 * 2 + 1);
28177 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28178 tail = grp_name + 4;
28179 if (encoded_filename_len)
28180 {
28181 for (i = 0; base[i]; i++)
28182 if (ISIDNUM (base[i]) || base[i] == '.')
28183 *tail++ = base[i];
28184 *tail++ = '.';
28185 }
28186 memcpy (tail, linebuf, linebuf_len);
28187 tail += linebuf_len;
28188 *tail++ = '.';
28189 for (i = 0; i < 16; i++)
28190 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28191
28192 /* Construct a macinfo_entry for DW_MACRO_import
28193 in the empty vector entry before the first define/undef. */
28194 inc = &(*macinfo_table)[idx - 1];
28195 inc->code = DW_MACRO_import;
28196 inc->lineno = 0;
28197 inc->info = ggc_strdup (grp_name);
28198 if (!*macinfo_htab)
28199 *macinfo_htab = new macinfo_hash_type (10);
28200 /* Avoid emitting duplicates. */
28201 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28202 if (*slot != NULL)
28203 {
28204 inc->code = 0;
28205 inc->info = NULL;
28206 /* If such an entry has been used before, just emit
28207 a DW_MACRO_import op. */
28208 inc = *slot;
28209 output_macinfo_op (inc);
28210 /* And clear all macinfo_entry in the range to avoid emitting them
28211 in the second pass. */
28212 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28213 {
28214 cur->code = 0;
28215 cur->info = NULL;
28216 }
28217 }
28218 else
28219 {
28220 *slot = inc;
28221 inc->lineno = (*macinfo_htab)->elements ();
28222 output_macinfo_op (inc);
28223 }
28224 return count;
28225 }
28226
28227 /* Save any strings needed by the macinfo table in the debug str
28228 table. All strings must be collected into the table by the time
28229 index_string is called. */
28230
28231 static void
28232 save_macinfo_strings (void)
28233 {
28234 unsigned len;
28235 unsigned i;
28236 macinfo_entry *ref;
28237
28238 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28239 {
28240 switch (ref->code)
28241 {
28242 /* Match the logic in output_macinfo_op to decide on
28243 indirect strings. */
28244 case DW_MACINFO_define:
28245 case DW_MACINFO_undef:
28246 len = strlen (ref->info) + 1;
28247 if (!dwarf_strict
28248 && len > DWARF_OFFSET_SIZE
28249 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28250 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28251 set_indirect_string (find_AT_string (ref->info));
28252 break;
28253 case DW_MACRO_define_strp:
28254 case DW_MACRO_undef_strp:
28255 set_indirect_string (find_AT_string (ref->info));
28256 break;
28257 default:
28258 break;
28259 }
28260 }
28261 }
28262
28263 /* Output macinfo section(s). */
28264
28265 static void
28266 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28267 {
28268 unsigned i;
28269 unsigned long length = vec_safe_length (macinfo_table);
28270 macinfo_entry *ref;
28271 vec<macinfo_entry, va_gc> *files = NULL;
28272 macinfo_hash_type *macinfo_htab = NULL;
28273 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28274
28275 if (! length)
28276 return;
28277
28278 /* output_macinfo* uses these interchangeably. */
28279 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28280 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28281 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28282 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28283
28284 /* AIX Assembler inserts the length, so adjust the reference to match the
28285 offset expected by debuggers. */
28286 strcpy (dl_section_ref, debug_line_label);
28287 if (XCOFF_DEBUGGING_INFO)
28288 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28289
28290 /* For .debug_macro emit the section header. */
28291 if (!dwarf_strict || dwarf_version >= 5)
28292 {
28293 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28294 "DWARF macro version number");
28295 if (DWARF_OFFSET_SIZE == 8)
28296 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28297 else
28298 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28299 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28300 debug_line_section, NULL);
28301 }
28302
28303 /* The first loop emits the primary .debug_macinfo section; after each
28304 op is emitted, its macinfo_entry is cleared. If a longer range of
28305 define/undef ops can be optimized using DW_MACRO_import, the
28306 DW_MACRO_import op is emitted and kept in the vector entry before the
28307 first define/undef in the range, while the define/undef ops themselves
28308 are not emitted here but kept for the second pass below. */
28309 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28310 {
28311 switch (ref->code)
28312 {
28313 case DW_MACINFO_start_file:
28314 vec_safe_push (files, *ref);
28315 break;
28316 case DW_MACINFO_end_file:
28317 if (!vec_safe_is_empty (files))
28318 files->pop ();
28319 break;
28320 case DW_MACINFO_define:
28321 case DW_MACINFO_undef:
28322 if ((!dwarf_strict || dwarf_version >= 5)
28323 && HAVE_COMDAT_GROUP
28324 && vec_safe_length (files) != 1
28325 && i > 0
28326 && i + 1 < length
28327 && (*macinfo_table)[i - 1].code == 0)
28328 {
28329 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28330 if (count)
28331 {
28332 i += count - 1;
28333 continue;
28334 }
28335 }
28336 break;
28337 case 0:
28338 /* A dummy entry may be inserted at the beginning to be able
28339 to optimize the whole block of predefined macros. */
28340 if (i == 0)
28341 continue;
28342 default:
28343 break;
28344 }
28345 output_macinfo_op (ref);
28346 ref->info = NULL;
28347 ref->code = 0;
28348 }
28349
28350 if (!macinfo_htab)
28351 return;
28352
28353 /* Save the number of transparent includes so we can adjust the
28354 label number for the fat LTO object DWARF. */
28355 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28356
28357 delete macinfo_htab;
28358 macinfo_htab = NULL;
28359
28360 /* If any DW_MACRO_import ops were used, then at each such entry
28361 terminate the current chain, switch to a new comdat .debug_macinfo
28362 section, and emit the corresponding define/undef entries within it. */
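/* Because each such section is link-once (comdat), keyed on the group name
   which embeds an md5 of the op sequence, identical chunks produced by
   different translation units are merged at link time, so the shared
   define/undef sequence is kept only once.  */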
28363 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28364 switch (ref->code)
28365 {
28366 case 0:
28367 continue;
28368 case DW_MACRO_import:
28369 {
28370 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28371 tree comdat_key = get_identifier (ref->info);
28372 /* Terminate the previous .debug_macinfo section. */
28373 dw2_asm_output_data (1, 0, "End compilation unit");
28374 targetm.asm_out.named_section (debug_macinfo_section_name,
28375 SECTION_DEBUG
28376 | SECTION_LINKONCE
28377 | (early_lto_debug
28378 ? SECTION_EXCLUDE : 0),
28379 comdat_key);
28380 ASM_GENERATE_INTERNAL_LABEL (label,
28381 DEBUG_MACRO_SECTION_LABEL,
28382 ref->lineno + macinfo_label_base);
28383 ASM_OUTPUT_LABEL (asm_out_file, label);
28384 ref->code = 0;
28385 ref->info = NULL;
28386 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28387 "DWARF macro version number");
28388 if (DWARF_OFFSET_SIZE == 8)
28389 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28390 else
28391 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28392 }
28393 break;
28394 case DW_MACINFO_define:
28395 case DW_MACINFO_undef:
28396 output_macinfo_op (ref);
28397 ref->code = 0;
28398 ref->info = NULL;
28399 break;
28400 default:
28401 gcc_unreachable ();
28402 }
28403
28404 macinfo_label_base += macinfo_label_base_adj;
28405 }
28406
28407 /* Initialize the various sections and labels for dwarf output; when
28408 EARLY_LTO_DEBUG, use the LTO early-debug variants of the sections.
28409 Returns the generation (zero-based count of times the function was called). */
28410
28411 static unsigned
28412 init_sections_and_labels (bool early_lto_debug)
28413 {
28414 /* As we may get called multiple times have a generation count for
28415 labels. */
28416 static unsigned generation = 0;
28417
28418 if (early_lto_debug)
28419 {
28420 if (!dwarf_split_debug_info)
28421 {
28422 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28423 SECTION_DEBUG | SECTION_EXCLUDE,
28424 NULL);
28425 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28426 SECTION_DEBUG | SECTION_EXCLUDE,
28427 NULL);
28428 debug_macinfo_section_name
28429 = ((dwarf_strict && dwarf_version < 5)
28430 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28431 debug_macinfo_section = get_section (debug_macinfo_section_name,
28432 SECTION_DEBUG
28433 | SECTION_EXCLUDE, NULL);
28434 }
28435 else
28436 {
28437 /* ??? Which of the following do we need early? */
28438 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28439 SECTION_DEBUG | SECTION_EXCLUDE,
28440 NULL);
28441 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28442 SECTION_DEBUG | SECTION_EXCLUDE,
28443 NULL);
28444 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28445 SECTION_DEBUG
28446 | SECTION_EXCLUDE, NULL);
28447 debug_skeleton_abbrev_section
28448 = get_section (DEBUG_LTO_ABBREV_SECTION,
28449 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28450 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28451 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28452 generation);
28453
28454 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28455 stay in the main .o, but the skeleton_line goes into the split
28456 off dwo. */
28457 debug_skeleton_line_section
28458 = get_section (DEBUG_LTO_LINE_SECTION,
28459 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28460 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28461 DEBUG_SKELETON_LINE_SECTION_LABEL,
28462 generation);
28463 debug_str_offsets_section
28464 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28465 SECTION_DEBUG | SECTION_EXCLUDE,
28466 NULL);
28467 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28468 DEBUG_SKELETON_INFO_SECTION_LABEL,
28469 generation);
28470 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28471 DEBUG_STR_DWO_SECTION_FLAGS,
28472 NULL);
28473 debug_macinfo_section_name
28474 = ((dwarf_strict && dwarf_version < 5)
28475 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28476 debug_macinfo_section = get_section (debug_macinfo_section_name,
28477 SECTION_DEBUG | SECTION_EXCLUDE,
28478 NULL);
28479 }
28480 /* For macro info and the file table we have to refer to a
28481 debug_line section. */
28482 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28483 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28484 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28485 DEBUG_LINE_SECTION_LABEL, generation);
28486
28487 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28488 DEBUG_STR_SECTION_FLAGS
28489 | SECTION_EXCLUDE, NULL);
28490 if (!dwarf_split_debug_info && !dwarf2out_as_loc_support)
28491 debug_line_str_section
28492 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28493 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28494 }
28495 else
28496 {
28497 if (!dwarf_split_debug_info)
28498 {
28499 debug_info_section = get_section (DEBUG_INFO_SECTION,
28500 SECTION_DEBUG, NULL);
28501 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28502 SECTION_DEBUG, NULL);
28503 debug_loc_section = get_section (dwarf_version >= 5
28504 ? DEBUG_LOCLISTS_SECTION
28505 : DEBUG_LOC_SECTION,
28506 SECTION_DEBUG, NULL);
28507 debug_macinfo_section_name
28508 = ((dwarf_strict && dwarf_version < 5)
28509 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28510 debug_macinfo_section = get_section (debug_macinfo_section_name,
28511 SECTION_DEBUG, NULL);
28512 }
28513 else
28514 {
28515 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28516 SECTION_DEBUG | SECTION_EXCLUDE,
28517 NULL);
28518 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28519 SECTION_DEBUG | SECTION_EXCLUDE,
28520 NULL);
28521 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28522 SECTION_DEBUG, NULL);
28523 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28524 SECTION_DEBUG, NULL);
28525 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28526 SECTION_DEBUG, NULL);
28527 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28528 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28529 generation);
28530
28531 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28532 stay in the main .o, but the skeleton_line goes into the
28533 split off dwo. */
28534 debug_skeleton_line_section
28535 = get_section (DEBUG_DWO_LINE_SECTION,
28536 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28537 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28538 DEBUG_SKELETON_LINE_SECTION_LABEL,
28539 generation);
28540 debug_str_offsets_section
28541 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28542 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28543 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28544 DEBUG_SKELETON_INFO_SECTION_LABEL,
28545 generation);
28546 debug_loc_section = get_section (dwarf_version >= 5
28547 ? DEBUG_DWO_LOCLISTS_SECTION
28548 : DEBUG_DWO_LOC_SECTION,
28549 SECTION_DEBUG | SECTION_EXCLUDE,
28550 NULL);
28551 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28552 DEBUG_STR_DWO_SECTION_FLAGS,
28553 NULL);
28554 debug_macinfo_section_name
28555 = ((dwarf_strict && dwarf_version < 5)
28556 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28557 debug_macinfo_section = get_section (debug_macinfo_section_name,
28558 SECTION_DEBUG | SECTION_EXCLUDE,
28559 NULL);
28560 }
28561 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28562 SECTION_DEBUG, NULL);
28563 debug_line_section = get_section (DEBUG_LINE_SECTION,
28564 SECTION_DEBUG, NULL);
28565 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28566 SECTION_DEBUG, NULL);
28567 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28568 SECTION_DEBUG, NULL);
28569 debug_str_section = get_section (DEBUG_STR_SECTION,
28570 DEBUG_STR_SECTION_FLAGS, NULL);
28571 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28572 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28573 DEBUG_STR_SECTION_FLAGS, NULL);
28574
28575 debug_ranges_section = get_section (dwarf_version >= 5
28576 ? DEBUG_RNGLISTS_SECTION
28577 : DEBUG_RANGES_SECTION,
28578 SECTION_DEBUG, NULL);
28579 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28580 SECTION_DEBUG, NULL);
28581 }
28582
28583 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28584 DEBUG_ABBREV_SECTION_LABEL, generation);
28585 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28586 DEBUG_INFO_SECTION_LABEL, generation);
28587 info_section_emitted = false;
28588 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28589 DEBUG_LINE_SECTION_LABEL, generation);
28590 /* There are up to 4 unique ranges labels per generation.
28591 See also output_rnglists. */
28592 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28593 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28594 if (dwarf_version >= 5 && dwarf_split_debug_info)
28595 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28596 DEBUG_RANGES_SECTION_LABEL,
28597 1 + generation * 4);
28598 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28599 DEBUG_ADDR_SECTION_LABEL, generation);
28600 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28601 (dwarf_strict && dwarf_version < 5)
28602 ? DEBUG_MACINFO_SECTION_LABEL
28603 : DEBUG_MACRO_SECTION_LABEL, generation);
28604 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28605 generation);
28606
28607 ++generation;
28608 return generation - 1;
28609 }
28610
28611 /* Set up for Dwarf output at the start of compilation. */
28612
28613 static void
28614 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28615 {
28616 /* Allocate the file_table. */
28617 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28618
28619 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28620 /* Allocate the decl_die_table. */
28621 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28622
28623 /* Allocate the decl_loc_table. */
28624 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28625
28626 /* Allocate the cached_dw_loc_list_table. */
28627 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28628
28629 /* Allocate the initial hunk of the decl_scope_table. */
28630 vec_alloc (decl_scope_table, 256);
28631
28632 /* Allocate the initial hunk of the abbrev_die_table. */
28633 vec_alloc (abbrev_die_table, 256);
28634 /* Zero-th entry is allocated, but unused. */
28635 abbrev_die_table->quick_push (NULL);
28636
28637 /* Allocate the dwarf_proc_stack_usage_map. */
28638 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28639
28640 /* Allocate the pubtypes and pubnames vectors. */
28641 vec_alloc (pubname_table, 32);
28642 vec_alloc (pubtype_table, 32);
28643
28644 vec_alloc (incomplete_types, 64);
28645
28646 vec_alloc (used_rtx_array, 32);
28647
28648 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28649 vec_alloc (macinfo_table, 64);
28650 #endif
28651
28652 /* If front-ends already registered a main translation unit but we were not
28653 ready to perform the association, do this now. */
28654 if (main_translation_unit != NULL_TREE)
28655 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28656 }
28657
28658 /* Called before compile () starts outputting functions, variables
28659 and toplevel asms into assembly. */
28660
28661 static void
28662 dwarf2out_assembly_start (void)
28663 {
28664 if (text_section_line_info)
28665 return;
28666
28667 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28668 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28669 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28670 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28671 COLD_TEXT_SECTION_LABEL, 0);
28672 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28673
28674 switch_to_section (text_section);
28675 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28676 #endif
28677
28678 /* Make sure the line number table for .text always exists. */
28679 text_section_line_info = new_line_info_table ();
28680 text_section_line_info->end_label = text_end_label;
28681
28682 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28683 cur_line_info_table = text_section_line_info;
28684 #endif
28685
28686 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28687 && dwarf2out_do_cfi_asm ()
28688 && !dwarf2out_do_eh_frame ())
28689 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28690 }
28691
28692 /* A helper function for dwarf2out_finish called through
28693 htab_traverse. Assign a string its index. All strings must be
28694 collected into the table by the time index_string is called,
28695 because the indexing code relies on htab_traverse to traverse nodes
28696 in the same order for each run. */
28697
28698 int
28699 index_string (indirect_string_node **h, unsigned int *index)
28700 {
28701 indirect_string_node *node = *h;
28702
28703 find_string_form (node);
28704 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28705 {
28706 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28707 node->index = *index;
28708 *index += 1;
28709 }
28710 return 1;
28711 }
28712
28713 /* A helper function for output_indirect_strings called through
28714 htab_traverse. Output the offset to a string and update the
28715 current offset. */
28716
28717 int
28718 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28719 {
28720 indirect_string_node *node = *h;
28721
28722 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28723 {
28724 /* Assert that this node has been assigned an index. */
28725 gcc_assert (node->index != NO_INDEX_ASSIGNED
28726 && node->index != NOT_INDEXED);
28727 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28728 "indexed string 0x%x: %s", node->index, node->str);
28729 *offset += strlen (node->str) + 1;
28730 }
28731 return 1;
28732 }
28733
28734 /* A helper function for dwarf2out_finish called through
28735 htab_traverse. Output the indexed string. */
28736
28737 int
28738 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28739 {
28740 struct indirect_string_node *node = *h;
28741
28742 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28743 {
28744 /* Assert that the strings are output in the same order as their
28745 indexes were assigned. */
28746 gcc_assert (*cur_idx == node->index);
28747 assemble_string (node->str, strlen (node->str) + 1);
28748 *cur_idx += 1;
28749 }
28750 return 1;
28751 }
28752
28753 /* A helper function for dwarf2out_finish called through
28754 htab_traverse. Emit one queued .debug_str string. */
28755
28756 int
28757 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28758 {
28759 struct indirect_string_node *node = *h;
28760
28761 node->form = find_string_form (node);
28762 if (node->form == form && node->refcount > 0)
28763 {
28764 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28765 assemble_string (node->str, strlen (node->str) + 1);
28766 }
28767
28768 return 1;
28769 }
28770
28771 /* Output the indexed string table. */
28772
28773 static void
28774 output_indirect_strings (void)
28775 {
28776 switch_to_section (debug_str_section);
28777 if (!dwarf_split_debug_info)
28778 debug_str_hash->traverse<enum dwarf_form,
28779 output_indirect_string> (DW_FORM_strp);
28780 else
28781 {
28782 unsigned int offset = 0;
28783 unsigned int cur_idx = 0;
28784
28785 if (skeleton_debug_str_hash)
28786 skeleton_debug_str_hash->traverse<enum dwarf_form,
28787 output_indirect_string> (DW_FORM_strp);
28788
28789 switch_to_section (debug_str_offsets_section);
28790 debug_str_hash->traverse_noresize
28791 <unsigned int *, output_index_string_offset> (&offset);
28792 switch_to_section (debug_str_dwo_section);
28793 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28794 (&cur_idx);
28795 }
28796 }
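
/* With -gsplit-dwarf the layout produced above is, schematically: the
   string-offsets section holds one DWARF_OFFSET_SIZE offset per indexed
   string, in index order, and .debug_str.dwo holds the string bytes
   themselves; a DW_FORM_GNU_str_index attribute stores only the index,
   which consumers resolve through the offsets table.  */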
28797
28798 /* Callback for htab_traverse to assign an index to an entry in the
28799 table, and to write that entry to the .debug_addr section. */
28800
28801 int
28802 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28803 {
28804 addr_table_entry *entry = *slot;
28805
28806 if (entry->refcount == 0)
28807 {
28808 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28809 || entry->index == NOT_INDEXED);
28810 return 1;
28811 }
28812
28813 gcc_assert (entry->index == *cur_index);
28814 (*cur_index)++;
28815
28816 switch (entry->kind)
28817 {
28818 case ate_kind_rtx:
28819 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28820 "0x%x", entry->index);
28821 break;
28822 case ate_kind_rtx_dtprel:
28823 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28824 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28825 DWARF2_ADDR_SIZE,
28826 entry->addr.rtl);
28827 fputc ('\n', asm_out_file);
28828 break;
28829 case ate_kind_label:
28830 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28831 "0x%x", entry->index);
28832 break;
28833 default:
28834 gcc_unreachable ();
28835 }
28836 return 1;
28837 }
28838
28839 /* A helper function for dwarf2out_finish. Counts the number
28840 of indexed addresses. Must match the logic of the function
28841 output_addr_table_entry above. */
28842 int
28843 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28844 {
28845 addr_table_entry *entry = *slot;
28846
28847 if (entry->refcount > 0)
28848 *last_idx += 1;
28849 return 1;
28850 }
28851
28852 /* Produce the .debug_addr section. */
28853
28854 static void
28855 output_addr_table (void)
28856 {
28857 unsigned int index = 0;
28858 if (addr_index_table == NULL || addr_index_table->size () == 0)
28859 return;
28860
28861 switch_to_section (debug_addr_section);
28862 addr_index_table
28863 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28864 }
28865
28866 #if ENABLE_ASSERT_CHECKING
28867 /* Verify that all marks are clear. */
28868
28869 static void
28870 verify_marks_clear (dw_die_ref die)
28871 {
28872 dw_die_ref c;
28873
28874 gcc_assert (! die->die_mark);
28875 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28876 }
28877 #endif /* ENABLE_ASSERT_CHECKING */
28878
28879 /* Clear the marks for a die and its children.
28880 Don't complain if the mark isn't set. */
28881
28882 static void
28883 prune_unmark_dies (dw_die_ref die)
28884 {
28885 dw_die_ref c;
28886
28887 if (die->die_mark)
28888 die->die_mark = 0;
28889 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28890 }
28891
28892 /* Given LOC that is referenced by a DIE we're marking as used, find all
28893 DWARF procedures it references and mark them as used as well. */
28894
28895 static void
28896 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28897 {
28898 for (; loc != NULL; loc = loc->dw_loc_next)
28899 switch (loc->dw_loc_opc)
28900 {
28901 case DW_OP_implicit_pointer:
28902 case DW_OP_convert:
28903 case DW_OP_reinterpret:
28904 case DW_OP_GNU_implicit_pointer:
28905 case DW_OP_GNU_convert:
28906 case DW_OP_GNU_reinterpret:
28907 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28908 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28909 break;
28910 case DW_OP_GNU_variable_value:
28911 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28912 {
28913 dw_die_ref ref
28914 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28915 if (ref == NULL)
28916 break;
28917 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28918 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28919 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28920 }
28921 /* FALLTHRU */
28922 case DW_OP_call2:
28923 case DW_OP_call4:
28924 case DW_OP_call_ref:
28925 case DW_OP_const_type:
28926 case DW_OP_GNU_const_type:
28927 case DW_OP_GNU_parameter_ref:
28928 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
28929 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28930 break;
28931 case DW_OP_regval_type:
28932 case DW_OP_deref_type:
28933 case DW_OP_GNU_regval_type:
28934 case DW_OP_GNU_deref_type:
28935 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
28936 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
28937 break;
28938 case DW_OP_entry_value:
28939 case DW_OP_GNU_entry_value:
28940 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
28941 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
28942 break;
28943 default:
28944 break;
28945 }
28946 }
28947
28948 /* Given DIE that we're marking as used, find any other dies
28949 it references as attributes and mark them as used. */
28950
28951 static void
28952 prune_unused_types_walk_attribs (dw_die_ref die)
28953 {
28954 dw_attr_node *a;
28955 unsigned ix;
28956
28957 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
28958 {
28959 switch (AT_class (a))
28960 {
28961 /* Make sure DWARF procedures referenced by location descriptions will
28962 get emitted. */
28963 case dw_val_class_loc:
28964 prune_unused_types_walk_loc_descr (AT_loc (a));
28965 break;
28966 case dw_val_class_loc_list:
28967 for (dw_loc_list_ref list = AT_loc_list (a);
28968 list != NULL;
28969 list = list->dw_loc_next)
28970 prune_unused_types_walk_loc_descr (list->expr);
28971 break;
28972
28973 case dw_val_class_view_list:
28974 /* This points to a loc_list in another attribute, so it's
28975 already covered. */
28976 break;
28977
28978 case dw_val_class_die_ref:
28979 /* A reference to another DIE.
28980 Make sure that it will get emitted.
28981 If it was broken out into a comdat group, don't follow it. */
28982 if (! AT_ref (a)->comdat_type_p
28983 || a->dw_attr == DW_AT_specification)
28984 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
28985 break;
28986
28987 case dw_val_class_str:
28988 /* Set the string's refcount to 0 so that prune_unused_types_mark
28989 accounts properly for it. */
28990 a->dw_attr_val.v.val_str->refcount = 0;
28991 break;
28992
28993 default:
28994 break;
28995 }
28996 }
28997 }
28998
28999 /* Mark the children DIEs of DIE that describe its generic parms and args. */
29000
29001 static void
29002 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29003 {
29004 dw_die_ref c;
29005
29006 if (die == NULL || die->die_child == NULL)
29007 return;
29008 c = die->die_child;
29009 do
29010 {
29011 if (is_template_parameter (c))
29012 prune_unused_types_mark (c, 1);
29013 c = c->die_sib;
29014 } while (c && c != die->die_child);
29015 }
29016
29017 /* Mark DIE as being used. If DOKIDS is true, then walk down
29018 to DIE's children. */
29019
29020 static void
29021 prune_unused_types_mark (dw_die_ref die, int dokids)
29022 {
29023 dw_die_ref c;
29024
29025 if (die->die_mark == 0)
29026 {
29027 /* We haven't done this node yet. Mark it as used. */
29028 die->die_mark = 1;
29029 /* If this is the DIE of a generic type instantiation,
29030 mark the children DIEs that describe its generic parms and
29031 args. */
29032 prune_unused_types_mark_generic_parms_dies (die);
29033
29034 /* We also have to mark its parents as used.
29035 (But we don't want to mark our parent's kids due to this,
29036 unless it is a class.) */
29037 if (die->die_parent)
29038 prune_unused_types_mark (die->die_parent,
29039 class_scope_p (die->die_parent));
29040
29041 /* Mark any referenced nodes. */
29042 prune_unused_types_walk_attribs (die);
29043
29044 /* If this node is a specification,
29045 also mark the definition, if it exists. */
29046 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29047 prune_unused_types_mark (die->die_definition, 1);
29048 }
29049
29050 if (dokids && die->die_mark != 2)
29051 {
29052 /* We need to walk the children, but haven't done so yet.
29053 Remember that we've walked the kids. */
29054 die->die_mark = 2;
29055
29056 /* If this is an array type, we need to make sure our
29057 kids get marked, even if they're types. If we're
29058 breaking out types into comdat sections, do this
29059 for all type definitions. */
29060 if (die->die_tag == DW_TAG_array_type
29061 || (use_debug_types
29062 && is_type_die (die) && ! is_declaration_die (die)))
29063 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29064 else
29065 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29066 }
29067 }
29068
29069 /* For local classes, check whether any static member functions were
29070 emitted and, if so, mark them. */
29071
29072 static void
29073 prune_unused_types_walk_local_classes (dw_die_ref die)
29074 {
29075 dw_die_ref c;
29076
29077 if (die->die_mark == 2)
29078 return;
29079
29080 switch (die->die_tag)
29081 {
29082 case DW_TAG_structure_type:
29083 case DW_TAG_union_type:
29084 case DW_TAG_class_type:
29085 break;
29086
29087 case DW_TAG_subprogram:
29088 if (!get_AT_flag (die, DW_AT_declaration)
29089 || die->die_definition != NULL)
29090 prune_unused_types_mark (die, 1);
29091 return;
29092
29093 default:
29094 return;
29095 }
29096
29097 /* Mark children. */
29098 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29099 }
29100
29101 /* Walk the tree DIE and mark types that we actually use. */
29102
29103 static void
29104 prune_unused_types_walk (dw_die_ref die)
29105 {
29106 dw_die_ref c;
29107
29108 /* Don't do anything if this node is already marked and
29109 children have been marked as well. */
29110 if (die->die_mark == 2)
29111 return;
29112
29113 switch (die->die_tag)
29114 {
29115 case DW_TAG_structure_type:
29116 case DW_TAG_union_type:
29117 case DW_TAG_class_type:
29118 if (die->die_perennial_p)
29119 break;
29120
29121 for (c = die->die_parent; c; c = c->die_parent)
29122 if (c->die_tag == DW_TAG_subprogram)
29123 break;
29124
29125 /* Finding used static member functions inside of classes
29126 is needed just for local classes, because for other classes
29127 static member function DIEs with DW_AT_specification
29128 are emitted outside of the DW_TAG_*_type. If we ever change
29129 it, we'd need to call this even for non-local classes. */
29130 if (c)
29131 prune_unused_types_walk_local_classes (die);
29132
29133 /* It's a type node --- don't mark it. */
29134 return;
29135
29136 case DW_TAG_const_type:
29137 case DW_TAG_packed_type:
29138 case DW_TAG_pointer_type:
29139 case DW_TAG_reference_type:
29140 case DW_TAG_rvalue_reference_type:
29141 case DW_TAG_volatile_type:
29142 case DW_TAG_typedef:
29143 case DW_TAG_array_type:
29144 case DW_TAG_interface_type:
29145 case DW_TAG_friend:
29146 case DW_TAG_enumeration_type:
29147 case DW_TAG_subroutine_type:
29148 case DW_TAG_string_type:
29149 case DW_TAG_set_type:
29150 case DW_TAG_subrange_type:
29151 case DW_TAG_ptr_to_member_type:
29152 case DW_TAG_file_type:
29153 /* Type nodes are useful only when other DIEs reference them --- don't
29154 mark them. */
29155 /* FALLTHROUGH */
29156
29157 case DW_TAG_dwarf_procedure:
29158 /* Likewise for DWARF procedures. */
29159
29160 if (die->die_perennial_p)
29161 break;
29162
29163 return;
29164
29165 default:
29166 /* Mark everything else. */
29167 break;
29168 }
29169
29170 if (die->die_mark == 0)
29171 {
29172 die->die_mark = 1;
29173
29174 /* Now, mark any dies referenced from here. */
29175 prune_unused_types_walk_attribs (die);
29176 }
29177
29178 die->die_mark = 2;
29179
29180 /* Mark children. */
29181 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29182 }
29183
29184 /* Increment the string counts on strings referred to from DIE's
29185 attributes. */
29186
29187 static void
29188 prune_unused_types_update_strings (dw_die_ref die)
29189 {
29190 dw_attr_node *a;
29191 unsigned ix;
29192
29193 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29194 if (AT_class (a) == dw_val_class_str)
29195 {
29196 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29197 s->refcount++;
29198 /* Avoid unnecessarily putting strings that are used less than
29199 twice in the hash table. */
29200 if (s->refcount
29201 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29202 {
29203 indirect_string_node **slot
29204 = debug_str_hash->find_slot_with_hash (s->str,
29205 htab_hash_string (s->str),
29206 INSERT);
29207 gcc_assert (*slot == NULL);
29208 *slot = s;
29209 }
29210 }
29211 }
29212
29213 /* Mark DIE and its children as removed. */
29214
29215 static void
29216 mark_removed (dw_die_ref die)
29217 {
29218 dw_die_ref c;
29219 die->removed = true;
29220 FOR_EACH_CHILD (die, c, mark_removed (c));
29221 }
29222
29223 /* Remove from the tree DIE any dies that aren't marked. */
29224
29225 static void
29226 prune_unused_types_prune (dw_die_ref die)
29227 {
29228 dw_die_ref c;
29229
29230 gcc_assert (die->die_mark);
29231 prune_unused_types_update_strings (die);
29232
29233 if (! die->die_child)
29234 return;
29235
29236 c = die->die_child;
29237 do {
29238 dw_die_ref prev = c, next;
29239 for (c = c->die_sib; ! c->die_mark; c = next)
29240 if (c == die->die_child)
29241 {
29242 /* No marked children between 'prev' and the end of the list. */
29243 if (prev == c)
29244 /* No marked children at all. */
29245 die->die_child = NULL;
29246 else
29247 {
29248 prev->die_sib = c->die_sib;
29249 die->die_child = prev;
29250 }
29251 c->die_sib = NULL;
29252 mark_removed (c);
29253 return;
29254 }
29255 else
29256 {
29257 next = c->die_sib;
29258 c->die_sib = NULL;
29259 mark_removed (c);
29260 }
29261
29262 if (c != prev->die_sib)
29263 prev->die_sib = c;
29264 prune_unused_types_prune (c);
29265 } while (c != die->die_child);
29266 }
29267
29268 /* Remove dies representing declarations that we never use. */
29269
29270 static void
29271 prune_unused_types (void)
29272 {
29273 unsigned int i;
29274 limbo_die_node *node;
29275 comdat_type_node *ctnode;
29276 pubname_entry *pub;
29277 dw_die_ref base_type;
29278
29279 #if ENABLE_ASSERT_CHECKING
29280 /* All the marks should already be clear. */
29281 verify_marks_clear (comp_unit_die ());
29282 for (node = limbo_die_list; node; node = node->next)
29283 verify_marks_clear (node->die);
29284 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29285 verify_marks_clear (ctnode->root_die);
29286 #endif /* ENABLE_ASSERT_CHECKING */
29287
29288 /* Mark types that are used in global variables. */
29289 premark_types_used_by_global_vars ();
29290
29291 /* Set the mark on nodes that are actually used. */
29292 prune_unused_types_walk (comp_unit_die ());
29293 for (node = limbo_die_list; node; node = node->next)
29294 prune_unused_types_walk (node->die);
29295 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29296 {
29297 prune_unused_types_walk (ctnode->root_die);
29298 prune_unused_types_mark (ctnode->type_die, 1);
29299 }
29300
29301 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29302 are unusual in that they are pubnames that are the children of pubtypes.
29303 They should only be marked via their parent DW_TAG_enumeration_type die,
29304 not as roots in themselves. */
29305 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29306 if (pub->die->die_tag != DW_TAG_enumerator)
29307 prune_unused_types_mark (pub->die, 1);
29308 for (i = 0; base_types.iterate (i, &base_type); i++)
29309 prune_unused_types_mark (base_type, 1);
29310
29311 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29312 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29313 callees). */
29314 cgraph_node *cnode;
29315 FOR_EACH_FUNCTION (cnode)
29316 if (cnode->referred_to_p (false))
29317 {
29318 dw_die_ref die = lookup_decl_die (cnode->decl);
29319 if (die == NULL || die->die_mark)
29320 continue;
29321 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29322 if (e->caller != cnode
29323 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29324 {
29325 prune_unused_types_mark (die, 1);
29326 break;
29327 }
29328 }
29329
29330 if (debug_str_hash)
29331 debug_str_hash->empty ();
29332 if (skeleton_debug_str_hash)
29333 skeleton_debug_str_hash->empty ();
29334 prune_unused_types_prune (comp_unit_die ());
29335 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29336 {
29337 node = *pnode;
29338 if (!node->die->die_mark)
29339 *pnode = node->next;
29340 else
29341 {
29342 prune_unused_types_prune (node->die);
29343 pnode = &node->next;
29344 }
29345 }
29346 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29347 prune_unused_types_prune (ctnode->root_die);
29348
29349 /* Leave the marks clear. */
29350 prune_unmark_dies (comp_unit_die ());
29351 for (node = limbo_die_list; node; node = node->next)
29352 prune_unmark_dies (node->die);
29353 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29354 prune_unmark_dies (ctnode->root_die);
29355 }
29356
29357 /* Helpers to manipulate hash table of comdat type units. */
29358
29359 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29360 {
29361 static inline hashval_t hash (const comdat_type_node *);
29362 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29363 };
29364
29365 inline hashval_t
29366 comdat_type_hasher::hash (const comdat_type_node *type_node)
29367 {
29368 hashval_t h;
29369 memcpy (&h, type_node->signature, sizeof (h));
29370 return h;
29371 }
29372
29373 inline bool
29374 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29375 const comdat_type_node *type_node_2)
29376 {
29377 return (! memcmp (type_node_1->signature, type_node_2->signature,
29378 DWARF_TYPE_SIGNATURE_SIZE));
29379 }
29380
29381 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29382 to the location where it would have been added had we known the
29383 DECL_ASSEMBLER_NAME when we added the other attributes. This will
29384 probably improve the compactness of debug info by removing
29385 otherwise-equivalent abbrevs, and hide any differences caused by
29386 deferring the computation of the assembler name, e.g. due to PCH. */
29387
29388 static inline void
29389 move_linkage_attr (dw_die_ref die)
29390 {
29391 unsigned ix = vec_safe_length (die->die_attr);
29392 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29393
29394 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29395 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29396
29397 while (--ix > 0)
29398 {
29399 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29400
29401 if (prev->dw_attr == DW_AT_decl_line
29402 || prev->dw_attr == DW_AT_decl_column
29403 || prev->dw_attr == DW_AT_name)
29404 break;
29405 }
29406
29407 if (ix != vec_safe_length (die->die_attr) - 1)
29408 {
29409 die->die_attr->pop ();
29410 die->die_attr->quick_insert (ix, linkage);
29411 }
29412 }
29413
29414 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29415 referenced from typed stack ops and count how often they are used. */
29416
29417 static void
29418 mark_base_types (dw_loc_descr_ref loc)
29419 {
29420 dw_die_ref base_type = NULL;
29421
29422 for (; loc; loc = loc->dw_loc_next)
29423 {
29424 switch (loc->dw_loc_opc)
29425 {
29426 case DW_OP_regval_type:
29427 case DW_OP_deref_type:
29428 case DW_OP_GNU_regval_type:
29429 case DW_OP_GNU_deref_type:
29430 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29431 break;
29432 case DW_OP_convert:
29433 case DW_OP_reinterpret:
29434 case DW_OP_GNU_convert:
29435 case DW_OP_GNU_reinterpret:
29436 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29437 continue;
29438 /* FALLTHRU */
29439 case DW_OP_const_type:
29440 case DW_OP_GNU_const_type:
29441 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29442 break;
29443 case DW_OP_entry_value:
29444 case DW_OP_GNU_entry_value:
29445 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29446 continue;
29447 default:
29448 continue;
29449 }
29450 gcc_assert (base_type->die_parent == comp_unit_die ());
29451 if (base_type->die_mark)
29452 base_type->die_mark++;
29453 else
29454 {
29455 base_types.safe_push (base_type);
29456 base_type->die_mark = 1;
29457 }
29458 }
29459 }
29460
29461 /* Comparison function for sorting marked base types. */
29462
29463 static int
29464 base_type_cmp (const void *x, const void *y)
29465 {
29466 dw_die_ref dx = *(const dw_die_ref *) x;
29467 dw_die_ref dy = *(const dw_die_ref *) y;
29468 unsigned int byte_size1, byte_size2;
29469 unsigned int encoding1, encoding2;
29470 unsigned int align1, align2;
29471 if (dx->die_mark > dy->die_mark)
29472 return -1;
29473 if (dx->die_mark < dy->die_mark)
29474 return 1;
29475 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29476 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29477 if (byte_size1 < byte_size2)
29478 return 1;
29479 if (byte_size1 > byte_size2)
29480 return -1;
29481 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29482 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29483 if (encoding1 < encoding2)
29484 return 1;
29485 if (encoding1 > encoding2)
29486 return -1;
29487 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29488 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29489 if (align1 < align2)
29490 return 1;
29491 if (align1 > align2)
29492 return -1;
29493 return 0;
29494 }
29495
29496 /* Move base types marked by mark_base_types as early as possible
29497 in the CU, sorted by decreasing usage count both to make the
29498 uleb128 references as small as possible and to make sure they
29499 will have die_offset already computed by calc_die_sizes when
29500 the sizes of typed stack loc ops are computed. */
29501
29502 static void
29503 move_marked_base_types (void)
29504 {
29505 unsigned int i;
29506 dw_die_ref base_type, die, c;
29507
29508 if (base_types.is_empty ())
29509 return;
29510
29511 /* Sort by decreasing usage count, they will be added again in that
29512 order later on. */
29513 base_types.qsort (base_type_cmp);
29514 die = comp_unit_die ();
29515 c = die->die_child;
29516 do
29517 {
29518 dw_die_ref prev = c;
29519 c = c->die_sib;
29520 while (c->die_mark)
29521 {
29522 remove_child_with_prev (c, prev);
29523 /* As base types got marked, there must be at least
29524 one node other than DW_TAG_base_type. */
29525 gcc_assert (die->die_child != NULL);
29526 c = prev->die_sib;
29527 }
29528 }
29529 while (c != die->die_child);
29530 gcc_assert (die->die_child);
29531 c = die->die_child;
29532 for (i = 0; base_types.iterate (i, &base_type); i++)
29533 {
29534 base_type->die_mark = 0;
29535 base_type->die_sib = c->die_sib;
29536 c->die_sib = base_type;
29537 c = base_type;
29538 }
29539 }
29540
29541 /* Helper function for resolve_addr: attempt to resolve
29542 one CONST_STRING and return true if successful. Similarly verify that
29543 SYMBOL_REFs refer to variables emitted in the current CU. */
29544
29545 static bool
29546 resolve_one_addr (rtx *addr)
29547 {
29548 rtx rtl = *addr;
29549
29550 if (GET_CODE (rtl) == CONST_STRING)
29551 {
29552 size_t len = strlen (XSTR (rtl, 0)) + 1;
29553 tree t = build_string (len, XSTR (rtl, 0));
29554 tree tlen = size_int (len - 1);
29555 TREE_TYPE (t)
29556 = build_array_type (char_type_node, build_index_type (tlen));
29557 rtl = lookup_constant_def (t);
29558 if (!rtl || !MEM_P (rtl))
29559 return false;
29560 rtl = XEXP (rtl, 0);
29561 if (GET_CODE (rtl) == SYMBOL_REF
29562 && SYMBOL_REF_DECL (rtl)
29563 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29564 return false;
29565 vec_safe_push (used_rtx_array, rtl);
29566 *addr = rtl;
29567 return true;
29568 }
29569
29570 if (GET_CODE (rtl) == SYMBOL_REF
29571 && SYMBOL_REF_DECL (rtl))
29572 {
29573 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29574 {
29575 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29576 return false;
29577 }
29578 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29579 return false;
29580 }
29581
29582 if (GET_CODE (rtl) == CONST)
29583 {
29584 subrtx_ptr_iterator::array_type array;
29585 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29586 if (!resolve_one_addr (*iter))
29587 return false;
29588 }
29589
29590 return true;
29591 }
29592
29593 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29594 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29595 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
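/* As an illustrative sketch (not taken from any particular test case),
   for a literal such as "abc" that gets a constant pool entry, the DIE
   created here would look roughly like

	DW_TAG_dwarf_procedure
	  DW_AT_location: DW_OP_implicit_value 4 byte block: 61 62 63 00

   i.e. the string bytes, including the terminating NUL, are embedded as
   a DW_OP_implicit_value block so that DW_OP_implicit_pointer can refer
   to this DIE. */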
29596
29597 static rtx
29598 string_cst_pool_decl (tree t)
29599 {
29600 rtx rtl = output_constant_def (t, 1);
29601 unsigned char *array;
29602 dw_loc_descr_ref l;
29603 tree decl;
29604 size_t len;
29605 dw_die_ref ref;
29606
29607 if (!rtl || !MEM_P (rtl))
29608 return NULL_RTX;
29609 rtl = XEXP (rtl, 0);
29610 if (GET_CODE (rtl) != SYMBOL_REF
29611 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29612 return NULL_RTX;
29613
29614 decl = SYMBOL_REF_DECL (rtl);
29615 if (!lookup_decl_die (decl))
29616 {
29617 len = TREE_STRING_LENGTH (t);
29618 vec_safe_push (used_rtx_array, rtl);
29619 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29620 array = ggc_vec_alloc<unsigned char> (len);
29621 memcpy (array, TREE_STRING_POINTER (t), len);
29622 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29623 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29624 l->dw_loc_oprnd2.v.val_vec.length = len;
29625 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29626 l->dw_loc_oprnd2.v.val_vec.array = array;
29627 add_AT_loc (ref, DW_AT_location, l);
29628 equate_decl_number_to_die (decl, ref);
29629 }
29630 return rtl;
29631 }
29632
29633 /* Helper function of resolve_addr_in_expr. LOC is
29634 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29635 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29636 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29637 with DW_OP_implicit_pointer if possible and return true;
29638 if unsuccessful, return false. */
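/* For example (an illustrative sketch, assuming VAR is a non-external
   variable whose DIE carries DW_AT_location or DW_AT_const_value), an
   unresolvable

	DW_OP_addr var+4 DW_OP_stack_value

   can be rewritten in place as

	DW_OP_implicit_pointer <DIE of var> 4

   where 4 is the byte offset extracted from the CONST PLUS form of the
   address. */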
29639
29640 static bool
29641 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29642 {
29643 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29644 HOST_WIDE_INT offset = 0;
29645 dw_die_ref ref = NULL;
29646 tree decl;
29647
29648 if (GET_CODE (rtl) == CONST
29649 && GET_CODE (XEXP (rtl, 0)) == PLUS
29650 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29651 {
29652 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29653 rtl = XEXP (XEXP (rtl, 0), 0);
29654 }
29655 if (GET_CODE (rtl) == CONST_STRING)
29656 {
29657 size_t len = strlen (XSTR (rtl, 0)) + 1;
29658 tree t = build_string (len, XSTR (rtl, 0));
29659 tree tlen = size_int (len - 1);
29660
29661 TREE_TYPE (t)
29662 = build_array_type (char_type_node, build_index_type (tlen));
29663 rtl = string_cst_pool_decl (t);
29664 if (!rtl)
29665 return false;
29666 }
29667 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29668 {
29669 decl = SYMBOL_REF_DECL (rtl);
29670 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29671 {
29672 ref = lookup_decl_die (decl);
29673 if (ref && (get_AT (ref, DW_AT_location)
29674 || get_AT (ref, DW_AT_const_value)))
29675 {
29676 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29677 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29678 loc->dw_loc_oprnd1.val_entry = NULL;
29679 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29680 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29681 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29682 loc->dw_loc_oprnd2.v.val_int = offset;
29683 return true;
29684 }
29685 }
29686 }
29687 return false;
29688 }
29689
29690 /* Helper function for resolve_addr: handle one location
29691 expression. Return false if at least one CONST_STRING or SYMBOL_REF in
29692 the location expression couldn't be resolved. */
29693
29694 static bool
29695 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29696 {
29697 dw_loc_descr_ref keep = NULL;
29698 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29699 switch (loc->dw_loc_opc)
29700 {
29701 case DW_OP_addr:
29702 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29703 {
29704 if ((prev == NULL
29705 || prev->dw_loc_opc == DW_OP_piece
29706 || prev->dw_loc_opc == DW_OP_bit_piece)
29707 && loc->dw_loc_next
29708 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29709 && (!dwarf_strict || dwarf_version >= 5)
29710 && optimize_one_addr_into_implicit_ptr (loc))
29711 break;
29712 return false;
29713 }
29714 break;
29715 case DW_OP_GNU_addr_index:
29716 case DW_OP_addrx:
29717 case DW_OP_GNU_const_index:
29718 case DW_OP_constx:
29719 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29720 || loc->dw_loc_opc == DW_OP_addrx)
29721 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29722 || loc->dw_loc_opc == DW_OP_constx)
29723 && loc->dtprel))
29724 {
29725 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29726 if (!resolve_one_addr (&rtl))
29727 return false;
29728 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29729 loc->dw_loc_oprnd1.val_entry
29730 = add_addr_table_entry (rtl, ate_kind_rtx);
29731 }
29732 break;
29733 case DW_OP_const4u:
29734 case DW_OP_const8u:
29735 if (loc->dtprel
29736 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29737 return false;
29738 break;
29739 case DW_OP_plus_uconst:
29740 if (size_of_loc_descr (loc)
29741 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29742 + 1
29743 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29744 {
29745 dw_loc_descr_ref repl
29746 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29747 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29748 add_loc_descr (&repl, loc->dw_loc_next);
29749 *loc = *repl;
29750 }
29751 break;
29752 case DW_OP_implicit_value:
29753 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29754 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29755 return false;
29756 break;
29757 case DW_OP_implicit_pointer:
29758 case DW_OP_GNU_implicit_pointer:
29759 case DW_OP_GNU_parameter_ref:
29760 case DW_OP_GNU_variable_value:
29761 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29762 {
29763 dw_die_ref ref
29764 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29765 if (ref == NULL)
29766 return false;
29767 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29768 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29769 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29770 }
29771 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29772 {
29773 if (prev == NULL
29774 && loc->dw_loc_next == NULL
29775 && AT_class (a) == dw_val_class_loc)
29776 switch (a->dw_attr)
29777 {
29778 /* The following attributes allow both exprloc and reference class,
29779 so if the whole expression is a single DW_OP_GNU_variable_value
29780 we can transform it into a reference. */
29781 case DW_AT_byte_size:
29782 case DW_AT_bit_size:
29783 case DW_AT_lower_bound:
29784 case DW_AT_upper_bound:
29785 case DW_AT_bit_stride:
29786 case DW_AT_count:
29787 case DW_AT_allocated:
29788 case DW_AT_associated:
29789 case DW_AT_byte_stride:
29790 a->dw_attr_val.val_class = dw_val_class_die_ref;
29791 a->dw_attr_val.val_entry = NULL;
29792 a->dw_attr_val.v.val_die_ref.die
29793 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29794 a->dw_attr_val.v.val_die_ref.external = 0;
29795 return true;
29796 default:
29797 break;
29798 }
29799 if (dwarf_strict)
29800 return false;
29801 }
29802 break;
29803 case DW_OP_const_type:
29804 case DW_OP_regval_type:
29805 case DW_OP_deref_type:
29806 case DW_OP_convert:
29807 case DW_OP_reinterpret:
29808 case DW_OP_GNU_const_type:
29809 case DW_OP_GNU_regval_type:
29810 case DW_OP_GNU_deref_type:
29811 case DW_OP_GNU_convert:
29812 case DW_OP_GNU_reinterpret:
29813 while (loc->dw_loc_next
29814 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29815 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29816 {
29817 dw_die_ref base1, base2;
29818 unsigned enc1, enc2, size1, size2;
29819 if (loc->dw_loc_opc == DW_OP_regval_type
29820 || loc->dw_loc_opc == DW_OP_deref_type
29821 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29822 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29823 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29824 else if (loc->dw_loc_oprnd1.val_class
29825 == dw_val_class_unsigned_const)
29826 break;
29827 else
29828 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29829 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29830 == dw_val_class_unsigned_const)
29831 break;
29832 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29833 gcc_assert (base1->die_tag == DW_TAG_base_type
29834 && base2->die_tag == DW_TAG_base_type);
29835 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29836 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29837 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29838 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29839 if (size1 == size2
29840 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29841 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29842 && loc != keep)
29843 || enc1 == enc2))
29844 {
29845 /* Optimize away next DW_OP_convert after
29846 adjusting LOC's base type die reference. */
29847 if (loc->dw_loc_opc == DW_OP_regval_type
29848 || loc->dw_loc_opc == DW_OP_deref_type
29849 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29850 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29851 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29852 else
29853 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29854 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29855 continue;
29856 }
29857 /* Don't change integer DW_OP_convert after e.g. floating
29858 point typed stack entry. */
29859 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29860 keep = loc->dw_loc_next;
29861 break;
29862 }
29863 break;
29864 default:
29865 break;
29866 }
29867 return true;
29868 }
29869
29870 /* Helper function of resolve_addr. DIE had a DW_AT_location consisting
29871 of a lone DW_OP_addr whose operand referred to DECL, and that
29872 DW_OP_addr couldn't be resolved. resolve_addr has already
29873 removed the DW_AT_location attribute. This function attempts to
29874 add a new DW_AT_location attribute with DW_OP_implicit_pointer,
29875 or a DW_AT_const_value attribute, to the DIE if possible. */
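/* A hypothetical sketch of the DW_OP_implicit_pointer case: given

	static int x;
	static int *p = &x;

   where P itself has been optimized away and its DW_OP_addr could not
   be resolved, P's DIE can get

	DW_AT_location: DW_OP_implicit_pointer <DIE of x> 0

   provided x's DIE has a DW_AT_location or DW_AT_const_value; a
   POINTER_PLUS_EXPR initializer would contribute a nonzero offset
   instead of 0. */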
29876
29877 static void
29878 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29879 {
29880 if (!VAR_P (decl)
29881 || lookup_decl_die (decl) != die
29882 || DECL_EXTERNAL (decl)
29883 || !TREE_STATIC (decl)
29884 || DECL_INITIAL (decl) == NULL_TREE
29885 || DECL_P (DECL_INITIAL (decl))
29886 || get_AT (die, DW_AT_const_value))
29887 return;
29888
29889 tree init = DECL_INITIAL (decl);
29890 HOST_WIDE_INT offset = 0;
29891 /* For variables that have been optimized away and thus
29892 don't have a memory location, see if we can emit
29893 DW_AT_const_value instead. */
29894 if (tree_add_const_value_attribute (die, init))
29895 return;
29896 if (dwarf_strict && dwarf_version < 5)
29897 return;
29898 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29899 and ADDR_EXPR refers to a decl that has DW_AT_location or
29900 DW_AT_const_value (but isn't addressable, otherwise
29901 resolving the original DW_OP_addr wouldn't fail), see if
29902 we can add DW_OP_implicit_pointer. */
29903 STRIP_NOPS (init);
29904 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29905 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29906 {
29907 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29908 init = TREE_OPERAND (init, 0);
29909 STRIP_NOPS (init);
29910 }
29911 if (TREE_CODE (init) != ADDR_EXPR)
29912 return;
29913 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29914 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29915 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29916 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29917 && TREE_OPERAND (init, 0) != decl))
29918 {
29919 dw_die_ref ref;
29920 dw_loc_descr_ref l;
29921
29922 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29923 {
29924 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29925 if (!rtl)
29926 return;
29927 decl = SYMBOL_REF_DECL (rtl);
29928 }
29929 else
29930 decl = TREE_OPERAND (init, 0);
29931 ref = lookup_decl_die (decl);
29932 if (ref == NULL
29933 || (!get_AT (ref, DW_AT_location)
29934 && !get_AT (ref, DW_AT_const_value)))
29935 return;
29936 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
29937 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29938 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
29939 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29940 add_AT_loc (die, DW_AT_location, l);
29941 }
29942 }
29943
29944 /* Return NULL if L is a valid DWARF expression, otherwise return the
29945 first op that is not valid in a DWARF expression. */
29946
29947 static dw_loc_descr_ref
29948 non_dwarf_expression (dw_loc_descr_ref l)
29949 {
29950 while (l)
29951 {
29952 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29953 return l;
29954 switch (l->dw_loc_opc)
29955 {
29956 case DW_OP_regx:
29957 case DW_OP_implicit_value:
29958 case DW_OP_stack_value:
29959 case DW_OP_implicit_pointer:
29960 case DW_OP_GNU_implicit_pointer:
29961 case DW_OP_GNU_parameter_ref:
29962 case DW_OP_piece:
29963 case DW_OP_bit_piece:
29964 return l;
29965 default:
29966 break;
29967 }
29968 l = l->dw_loc_next;
29969 }
29970 return NULL;
29971 }
29972
29973 /* Return an adjusted copy of EXPR:
29974 If it is an empty DWARF expression, return it.
29975 If it is a valid non-empty DWARF expression,
29976 return a copy of EXPR with DW_OP_deref appended to it.
29977 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
29978 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
29979 If it is a DWARF expression followed by DW_OP_stack_value, return a
29980 copy of the DWARF expression without anything appended.
29981 Otherwise, return NULL. */
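/* A few illustrative sketches of the cases above (register numbers and
   offsets are arbitrary):

	DW_OP_fbreg -16                   -> DW_OP_fbreg -16 DW_OP_deref
	DW_OP_reg3                        -> DW_OP_breg3 0
	DW_OP_regx 35                     -> DW_OP_bregx 35 0
	DW_OP_addr sym DW_OP_stack_value  -> DW_OP_addr sym
	DW_OP_reg3 DW_OP_piece 4          -> NULL (contains a piece op)  */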
29982
29983 static dw_loc_descr_ref
29984 copy_deref_exprloc (dw_loc_descr_ref expr)
29985 {
29986 dw_loc_descr_ref tail = NULL;
29987
29988 if (expr == NULL)
29989 return NULL;
29990
29991 dw_loc_descr_ref l = non_dwarf_expression (expr);
29992 if (l && l->dw_loc_next)
29993 return NULL;
29994
29995 if (l)
29996 {
29997 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29998 tail = new_loc_descr ((enum dwarf_location_atom)
29999 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30000 0, 0);
30001 else
30002 switch (l->dw_loc_opc)
30003 {
30004 case DW_OP_regx:
30005 tail = new_loc_descr (DW_OP_bregx,
30006 l->dw_loc_oprnd1.v.val_unsigned, 0);
30007 break;
30008 case DW_OP_stack_value:
30009 break;
30010 default:
30011 return NULL;
30012 }
30013 }
30014 else
30015 tail = new_loc_descr (DW_OP_deref, 0, 0);
30016
30017 dw_loc_descr_ref ret = NULL, *p = &ret;
30018 while (expr != l)
30019 {
30020 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30021 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30022 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30023 p = &(*p)->dw_loc_next;
30024 expr = expr->dw_loc_next;
30025 }
30026 *p = tail;
30027 return ret;
30028 }
30029
30030 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30031 reference to a variable or argument, adjust it if needed and return:
30032 -1 if the DW_AT_string_length attribute (and any present
30033 DW_AT_{string_length_,}byte_size attribute) should be removed,
30034 0 if the attribute should be kept, perhaps with minor modifications (no need to rescan),
30035 1 if the attribute has been successfully adjusted. */
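/* Illustrative sketches (assuming STR_LEN_VAR is the referenced string
   length variable and its DW_AT_location is a single plain DWARF
   expression):

	DW_AT_string_length: DW_OP_GNU_variable_value <DIE of STR_LEN_VAR>
	  -> DW_OP_call4 <DIE of STR_LEN_VAR> DW_OP_deref       (returns 0)

	DW_AT_string_length: DW_OP_GNU_variable_value <DIE of STR_LEN_VAR>
			     DW_OP_stack_value
	  -> for -gdwarf-5, a plain reference (DW_FORM_ref*) to
	     STR_LEN_VAR's DIE                                   (returns 0)  */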
30036
30037 static int
30038 optimize_string_length (dw_attr_node *a)
30039 {
30040 dw_loc_descr_ref l = AT_loc (a), lv;
30041 dw_die_ref die;
30042 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30043 {
30044 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30045 die = lookup_decl_die (decl);
30046 if (die)
30047 {
30048 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30049 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30050 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30051 }
30052 else
30053 return -1;
30054 }
30055 else
30056 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30057
30058 /* DWARF5 allows reference class, so we can then reference the DIE.
30059 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30060 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30061 {
30062 a->dw_attr_val.val_class = dw_val_class_die_ref;
30063 a->dw_attr_val.val_entry = NULL;
30064 a->dw_attr_val.v.val_die_ref.die = die;
30065 a->dw_attr_val.v.val_die_ref.external = 0;
30066 return 0;
30067 }
30068
30069 dw_attr_node *av = get_AT (die, DW_AT_location);
30070 dw_loc_list_ref d;
30071 bool non_dwarf_expr = false;
30072
30073 if (av == NULL)
30074 return dwarf_strict ? -1 : 0;
30075 switch (AT_class (av))
30076 {
30077 case dw_val_class_loc_list:
30078 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30079 if (d->expr && non_dwarf_expression (d->expr))
30080 non_dwarf_expr = true;
30081 break;
30082 case dw_val_class_view_list:
30083 gcc_unreachable ();
30084 case dw_val_class_loc:
30085 lv = AT_loc (av);
30086 if (lv == NULL)
30087 return dwarf_strict ? -1 : 0;
30088 if (non_dwarf_expression (lv))
30089 non_dwarf_expr = true;
30090 break;
30091 default:
30092 return dwarf_strict ? -1 : 0;
30093 }
30094
30095 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30096 into DW_OP_call4 or DW_OP_GNU_variable_value into
30097 DW_OP_call4 DW_OP_deref, do so. */
30098 if (!non_dwarf_expr
30099 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30100 {
30101 l->dw_loc_opc = DW_OP_call4;
30102 if (l->dw_loc_next)
30103 l->dw_loc_next = NULL;
30104 else
30105 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30106 return 0;
30107 }
30108
30109 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30110 copy over the DW_AT_location attribute from die to a. */
30111 if (l->dw_loc_next != NULL)
30112 {
30113 a->dw_attr_val = av->dw_attr_val;
30114 return 1;
30115 }
30116
30117 dw_loc_list_ref list, *p;
30118 switch (AT_class (av))
30119 {
30120 case dw_val_class_loc_list:
30121 p = &list;
30122 list = NULL;
30123 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30124 {
30125 lv = copy_deref_exprloc (d->expr);
30126 if (lv)
30127 {
30128 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30129 p = &(*p)->dw_loc_next;
30130 }
30131 else if (!dwarf_strict && d->expr)
30132 return 0;
30133 }
30134 if (list == NULL)
30135 return dwarf_strict ? -1 : 0;
30136 a->dw_attr_val.val_class = dw_val_class_loc_list;
30137 gen_llsym (list);
30138 *AT_loc_list_ptr (a) = list;
30139 return 1;
30140 case dw_val_class_loc:
30141 lv = copy_deref_exprloc (AT_loc (av));
30142 if (lv == NULL)
30143 return dwarf_strict ? -1 : 0;
30144 a->dw_attr_val.v.val_loc = lv;
30145 return 1;
30146 default:
30147 gcc_unreachable ();
30148 }
30149 }
30150
30151 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30152 an address in .rodata section if the string literal is emitted there,
30153 or else remove the containing location list, or replace DW_AT_const_value
30154 with DW_AT_location and an empty location expression, if it isn't found
30155 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
30156 to something that has been emitted in the current CU. */
30157
30158 static void
30159 resolve_addr (dw_die_ref die)
30160 {
30161 dw_die_ref c;
30162 dw_attr_node *a;
30163 dw_loc_list_ref *curr, *start, loc;
30164 unsigned ix;
30165 bool remove_AT_byte_size = false;
30166
30167 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30168 switch (AT_class (a))
30169 {
30170 case dw_val_class_loc_list:
30171 start = curr = AT_loc_list_ptr (a);
30172 loc = *curr;
30173 gcc_assert (loc);
30174 /* The same list can be referenced more than once. See if we have
30175 already recorded the result from a previous pass. */
30176 if (loc->replaced)
30177 *curr = loc->dw_loc_next;
30178 else if (!loc->resolved_addr)
30179 {
30180 /* As things stand, we do not expect or allow one die to
30181 reference a suffix of another die's location list chain.
30182 References must be identical or completely separate.
30183 There is therefore no need to cache the result of this
30184 pass on any list other than the first; doing so
30185 would lead to unnecessary writes. */
30186 while (*curr)
30187 {
30188 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30189 if (!resolve_addr_in_expr (a, (*curr)->expr))
30190 {
30191 dw_loc_list_ref next = (*curr)->dw_loc_next;
30192 dw_loc_descr_ref l = (*curr)->expr;
30193
30194 if (next && (*curr)->ll_symbol)
30195 {
30196 gcc_assert (!next->ll_symbol);
30197 next->ll_symbol = (*curr)->ll_symbol;
30198 next->vl_symbol = (*curr)->vl_symbol;
30199 }
30200 if (dwarf_split_debug_info)
30201 remove_loc_list_addr_table_entries (l);
30202 *curr = next;
30203 }
30204 else
30205 {
30206 mark_base_types ((*curr)->expr);
30207 curr = &(*curr)->dw_loc_next;
30208 }
30209 }
30210 if (loc == *start)
30211 loc->resolved_addr = 1;
30212 else
30213 {
30214 loc->replaced = 1;
30215 loc->dw_loc_next = *start;
30216 }
30217 }
30218 if (!*start)
30219 {
30220 remove_AT (die, a->dw_attr);
30221 ix--;
30222 }
30223 break;
30224 case dw_val_class_view_list:
30225 {
30226 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30227 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30228 dw_val_node *llnode
30229 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30230 /* If we no longer have a loclist, or it no longer needs
30231 views, drop this attribute. */
30232 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30233 {
30234 remove_AT (die, a->dw_attr);
30235 ix--;
30236 }
30237 break;
30238 }
30239 case dw_val_class_loc:
30240 {
30241 dw_loc_descr_ref l = AT_loc (a);
30242 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30243 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30244 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30245 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30246 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30247 with DW_FORM_ref referencing the same DIE as
30248 DW_OP_GNU_variable_value used to reference. */
30249 if (a->dw_attr == DW_AT_string_length
30250 && l
30251 && l->dw_loc_opc == DW_OP_GNU_variable_value
30252 && (l->dw_loc_next == NULL
30253 || (l->dw_loc_next->dw_loc_next == NULL
30254 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30255 {
30256 switch (optimize_string_length (a))
30257 {
30258 case -1:
30259 remove_AT (die, a->dw_attr);
30260 ix--;
30261 /* If we drop DW_AT_string_length, we need to drop also
30262 DW_AT_{string_length_,}byte_size. */
30263 remove_AT_byte_size = true;
30264 continue;
30265 default:
30266 break;
30267 case 1:
30268 /* Even if we keep the optimized DW_AT_string_length,
30269 it might have changed AT_class, so process it again. */
30270 ix--;
30271 continue;
30272 }
30273 }
30274 /* For -gdwarf-2 don't attempt to optimize
30275 DW_AT_data_member_location containing
30276 DW_OP_plus_uconst - older consumers might
30277 rely on it being that op instead of a more complex,
30278 but shorter, location description. */
30279 if ((dwarf_version > 2
30280 || a->dw_attr != DW_AT_data_member_location
30281 || l == NULL
30282 || l->dw_loc_opc != DW_OP_plus_uconst
30283 || l->dw_loc_next != NULL)
30284 && !resolve_addr_in_expr (a, l))
30285 {
30286 if (dwarf_split_debug_info)
30287 remove_loc_list_addr_table_entries (l);
30288 if (l != NULL
30289 && l->dw_loc_next == NULL
30290 && l->dw_loc_opc == DW_OP_addr
30291 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30292 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30293 && a->dw_attr == DW_AT_location)
30294 {
30295 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30296 remove_AT (die, a->dw_attr);
30297 ix--;
30298 optimize_location_into_implicit_ptr (die, decl);
30299 break;
30300 }
30301 if (a->dw_attr == DW_AT_string_length)
30302 /* If we drop DW_AT_string_length, we need to drop also
30303 DW_AT_{string_length_,}byte_size. */
30304 remove_AT_byte_size = true;
30305 remove_AT (die, a->dw_attr);
30306 ix--;
30307 }
30308 else
30309 mark_base_types (l);
30310 }
30311 break;
30312 case dw_val_class_addr:
30313 if (a->dw_attr == DW_AT_const_value
30314 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30315 {
30316 if (AT_index (a) != NOT_INDEXED)
30317 remove_addr_table_entry (a->dw_attr_val.val_entry);
30318 remove_AT (die, a->dw_attr);
30319 ix--;
30320 }
30321 if ((die->die_tag == DW_TAG_call_site
30322 && a->dw_attr == DW_AT_call_origin)
30323 || (die->die_tag == DW_TAG_GNU_call_site
30324 && a->dw_attr == DW_AT_abstract_origin))
30325 {
30326 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30327 dw_die_ref tdie = lookup_decl_die (tdecl);
30328 dw_die_ref cdie;
30329 if (tdie == NULL
30330 && DECL_EXTERNAL (tdecl)
30331 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30332 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30333 {
30334 dw_die_ref pdie = cdie;
30335 /* Make sure we don't add these DIEs into type units.
30336 We could emit skeleton DIEs for context (namespaces,
30337 outer structs/classes) and a skeleton DIE for the
30338 innermost context with DW_AT_signature pointing to the
30339 type unit. See PR78835. */
30340 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30341 pdie = pdie->die_parent;
30342 if (pdie == NULL)
30343 {
30344 /* Creating a full DIE for tdecl is overly expensive and
30345 at this point even wrong when in the LTO phase, as it can
30346 end up generating new type DIEs that we didn't output, and
30347 then optimize_external_refs would crash. */
30348 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30349 add_AT_flag (tdie, DW_AT_external, 1);
30350 add_AT_flag (tdie, DW_AT_declaration, 1);
30351 add_linkage_attr (tdie, tdecl);
30352 add_name_and_src_coords_attributes (tdie, tdecl, true);
30353 equate_decl_number_to_die (tdecl, tdie);
30354 }
30355 }
30356 if (tdie)
30357 {
30358 a->dw_attr_val.val_class = dw_val_class_die_ref;
30359 a->dw_attr_val.v.val_die_ref.die = tdie;
30360 a->dw_attr_val.v.val_die_ref.external = 0;
30361 }
30362 else
30363 {
30364 if (AT_index (a) != NOT_INDEXED)
30365 remove_addr_table_entry (a->dw_attr_val.val_entry);
30366 remove_AT (die, a->dw_attr);
30367 ix--;
30368 }
30369 }
30370 break;
30371 default:
30372 break;
30373 }
30374
30375 if (remove_AT_byte_size)
30376 remove_AT (die, dwarf_version >= 5
30377 ? DW_AT_string_length_byte_size
30378 : DW_AT_byte_size);
30379
30380 FOR_EACH_CHILD (die, c, resolve_addr (c));
30381 }
30382 \f
30383 /* Helper routines for optimize_location_lists.
30384 This pass tries to share identical location lists in the .debug_loc
30385 section. */
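/* For instance, if two variables in different scopes end up with
   byte-for-byte identical location lists, the second DW_AT_location is
   simply redirected to the first list, so only one copy is emitted in
   .debug_loc.  (Illustrative description, not a specific test case.) */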
30386
30387 /* Iteratively hash operands of LOC opcode into HSTATE. */
30388
30389 static void
30390 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30391 {
30392 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30393 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30394
30395 switch (loc->dw_loc_opc)
30396 {
30397 case DW_OP_const4u:
30398 case DW_OP_const8u:
30399 if (loc->dtprel)
30400 goto hash_addr;
30401 /* FALLTHRU */
30402 case DW_OP_const1u:
30403 case DW_OP_const1s:
30404 case DW_OP_const2u:
30405 case DW_OP_const2s:
30406 case DW_OP_const4s:
30407 case DW_OP_const8s:
30408 case DW_OP_constu:
30409 case DW_OP_consts:
30410 case DW_OP_pick:
30411 case DW_OP_plus_uconst:
30412 case DW_OP_breg0:
30413 case DW_OP_breg1:
30414 case DW_OP_breg2:
30415 case DW_OP_breg3:
30416 case DW_OP_breg4:
30417 case DW_OP_breg5:
30418 case DW_OP_breg6:
30419 case DW_OP_breg7:
30420 case DW_OP_breg8:
30421 case DW_OP_breg9:
30422 case DW_OP_breg10:
30423 case DW_OP_breg11:
30424 case DW_OP_breg12:
30425 case DW_OP_breg13:
30426 case DW_OP_breg14:
30427 case DW_OP_breg15:
30428 case DW_OP_breg16:
30429 case DW_OP_breg17:
30430 case DW_OP_breg18:
30431 case DW_OP_breg19:
30432 case DW_OP_breg20:
30433 case DW_OP_breg21:
30434 case DW_OP_breg22:
30435 case DW_OP_breg23:
30436 case DW_OP_breg24:
30437 case DW_OP_breg25:
30438 case DW_OP_breg26:
30439 case DW_OP_breg27:
30440 case DW_OP_breg28:
30441 case DW_OP_breg29:
30442 case DW_OP_breg30:
30443 case DW_OP_breg31:
30444 case DW_OP_regx:
30445 case DW_OP_fbreg:
30446 case DW_OP_piece:
30447 case DW_OP_deref_size:
30448 case DW_OP_xderef_size:
30449 hstate.add_object (val1->v.val_int);
30450 break;
30451 case DW_OP_skip:
30452 case DW_OP_bra:
30453 {
30454 int offset;
30455
30456 gcc_assert (val1->val_class == dw_val_class_loc);
30457 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30458 hstate.add_object (offset);
30459 }
30460 break;
30461 case DW_OP_implicit_value:
30462 hstate.add_object (val1->v.val_unsigned);
30463 switch (val2->val_class)
30464 {
30465 case dw_val_class_const:
30466 hstate.add_object (val2->v.val_int);
30467 break;
30468 case dw_val_class_vec:
30469 {
30470 unsigned int elt_size = val2->v.val_vec.elt_size;
30471 unsigned int len = val2->v.val_vec.length;
30472
30473 hstate.add_int (elt_size);
30474 hstate.add_int (len);
30475 hstate.add (val2->v.val_vec.array, len * elt_size);
30476 }
30477 break;
30478 case dw_val_class_const_double:
30479 hstate.add_object (val2->v.val_double.low);
30480 hstate.add_object (val2->v.val_double.high);
30481 break;
30482 case dw_val_class_wide_int:
30483 hstate.add (val2->v.val_wide->get_val (),
30484 get_full_len (*val2->v.val_wide)
30485 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30486 break;
30487 case dw_val_class_addr:
30488 inchash::add_rtx (val2->v.val_addr, hstate);
30489 break;
30490 default:
30491 gcc_unreachable ();
30492 }
30493 break;
30494 case DW_OP_bregx:
30495 case DW_OP_bit_piece:
30496 hstate.add_object (val1->v.val_int);
30497 hstate.add_object (val2->v.val_int);
30498 break;
30499 case DW_OP_addr:
30500 hash_addr:
30501 if (loc->dtprel)
30502 {
30503 unsigned char dtprel = 0xd1;
30504 hstate.add_object (dtprel);
30505 }
30506 inchash::add_rtx (val1->v.val_addr, hstate);
30507 break;
30508 case DW_OP_GNU_addr_index:
30509 case DW_OP_addrx:
30510 case DW_OP_GNU_const_index:
30511 case DW_OP_constx:
30512 {
30513 if (loc->dtprel)
30514 {
30515 unsigned char dtprel = 0xd1;
30516 hstate.add_object (dtprel);
30517 }
30518 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30519 }
30520 break;
30521 case DW_OP_implicit_pointer:
30522 case DW_OP_GNU_implicit_pointer:
30523 hstate.add_int (val2->v.val_int);
30524 break;
30525 case DW_OP_entry_value:
30526 case DW_OP_GNU_entry_value:
30527 hstate.add_object (val1->v.val_loc);
30528 break;
30529 case DW_OP_regval_type:
30530 case DW_OP_deref_type:
30531 case DW_OP_GNU_regval_type:
30532 case DW_OP_GNU_deref_type:
30533 {
30534 unsigned int byte_size
30535 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30536 unsigned int encoding
30537 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30538 hstate.add_object (val1->v.val_int);
30539 hstate.add_object (byte_size);
30540 hstate.add_object (encoding);
30541 }
30542 break;
30543 case DW_OP_convert:
30544 case DW_OP_reinterpret:
30545 case DW_OP_GNU_convert:
30546 case DW_OP_GNU_reinterpret:
30547 if (val1->val_class == dw_val_class_unsigned_const)
30548 {
30549 hstate.add_object (val1->v.val_unsigned);
30550 break;
30551 }
30552 /* FALLTHRU */
30553 case DW_OP_const_type:
30554 case DW_OP_GNU_const_type:
30555 {
30556 unsigned int byte_size
30557 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30558 unsigned int encoding
30559 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30560 hstate.add_object (byte_size);
30561 hstate.add_object (encoding);
30562 if (loc->dw_loc_opc != DW_OP_const_type
30563 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30564 break;
30565 hstate.add_object (val2->val_class);
30566 switch (val2->val_class)
30567 {
30568 case dw_val_class_const:
30569 hstate.add_object (val2->v.val_int);
30570 break;
30571 case dw_val_class_vec:
30572 {
30573 unsigned int elt_size = val2->v.val_vec.elt_size;
30574 unsigned int len = val2->v.val_vec.length;
30575
30576 hstate.add_object (elt_size);
30577 hstate.add_object (len);
30578 hstate.add (val2->v.val_vec.array, len * elt_size);
30579 }
30580 break;
30581 case dw_val_class_const_double:
30582 hstate.add_object (val2->v.val_double.low);
30583 hstate.add_object (val2->v.val_double.high);
30584 break;
30585 case dw_val_class_wide_int:
30586 hstate.add (val2->v.val_wide->get_val (),
30587 get_full_len (*val2->v.val_wide)
30588 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30589 break;
30590 default:
30591 gcc_unreachable ();
30592 }
30593 }
30594 break;
30595
30596 default:
30597 /* Other codes have no operands. */
30598 break;
30599 }
30600 }
30601
30602 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30603
30604 static inline void
30605 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30606 {
30607 dw_loc_descr_ref l;
30608 bool sizes_computed = false;
30609 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30610 size_of_locs (loc);
30611
30612 for (l = loc; l != NULL; l = l->dw_loc_next)
30613 {
30614 enum dwarf_location_atom opc = l->dw_loc_opc;
30615 hstate.add_object (opc);
30616 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30617 {
30618 size_of_locs (loc);
30619 sizes_computed = true;
30620 }
30621 hash_loc_operands (l, hstate);
30622 }
30623 }
30624
30625 /* Compute hash of the whole location list LIST_HEAD. */
30626
30627 static inline void
30628 hash_loc_list (dw_loc_list_ref list_head)
30629 {
30630 dw_loc_list_ref curr = list_head;
30631 inchash::hash hstate;
30632
30633 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30634 {
30635 hstate.add (curr->begin, strlen (curr->begin) + 1);
30636 hstate.add (curr->end, strlen (curr->end) + 1);
30637 hstate.add_object (curr->vbegin);
30638 hstate.add_object (curr->vend);
30639 if (curr->section)
30640 hstate.add (curr->section, strlen (curr->section) + 1);
30641 hash_locs (curr->expr, hstate);
30642 }
30643 list_head->hash = hstate.end ();
30644 }
30645
30646 /* Return true if X and Y opcodes have the same operands. */
30647
30648 static inline bool
30649 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30650 {
30651 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30652 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30653 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30654 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30655
30656 switch (x->dw_loc_opc)
30657 {
30658 case DW_OP_const4u:
30659 case DW_OP_const8u:
30660 if (x->dtprel)
30661 goto hash_addr;
30662 /* FALLTHRU */
30663 case DW_OP_const1u:
30664 case DW_OP_const1s:
30665 case DW_OP_const2u:
30666 case DW_OP_const2s:
30667 case DW_OP_const4s:
30668 case DW_OP_const8s:
30669 case DW_OP_constu:
30670 case DW_OP_consts:
30671 case DW_OP_pick:
30672 case DW_OP_plus_uconst:
30673 case DW_OP_breg0:
30674 case DW_OP_breg1:
30675 case DW_OP_breg2:
30676 case DW_OP_breg3:
30677 case DW_OP_breg4:
30678 case DW_OP_breg5:
30679 case DW_OP_breg6:
30680 case DW_OP_breg7:
30681 case DW_OP_breg8:
30682 case DW_OP_breg9:
30683 case DW_OP_breg10:
30684 case DW_OP_breg11:
30685 case DW_OP_breg12:
30686 case DW_OP_breg13:
30687 case DW_OP_breg14:
30688 case DW_OP_breg15:
30689 case DW_OP_breg16:
30690 case DW_OP_breg17:
30691 case DW_OP_breg18:
30692 case DW_OP_breg19:
30693 case DW_OP_breg20:
30694 case DW_OP_breg21:
30695 case DW_OP_breg22:
30696 case DW_OP_breg23:
30697 case DW_OP_breg24:
30698 case DW_OP_breg25:
30699 case DW_OP_breg26:
30700 case DW_OP_breg27:
30701 case DW_OP_breg28:
30702 case DW_OP_breg29:
30703 case DW_OP_breg30:
30704 case DW_OP_breg31:
30705 case DW_OP_regx:
30706 case DW_OP_fbreg:
30707 case DW_OP_piece:
30708 case DW_OP_deref_size:
30709 case DW_OP_xderef_size:
30710 return valx1->v.val_int == valy1->v.val_int;
30711 case DW_OP_skip:
30712 case DW_OP_bra:
30713 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30714 can cause irrelevant differences in dw_loc_addr. */
30715 gcc_assert (valx1->val_class == dw_val_class_loc
30716 && valy1->val_class == dw_val_class_loc
30717 && (dwarf_split_debug_info
30718 || x->dw_loc_addr == y->dw_loc_addr));
30719 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30720 case DW_OP_implicit_value:
30721 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30722 || valx2->val_class != valy2->val_class)
30723 return false;
30724 switch (valx2->val_class)
30725 {
30726 case dw_val_class_const:
30727 return valx2->v.val_int == valy2->v.val_int;
30728 case dw_val_class_vec:
30729 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30730 && valx2->v.val_vec.length == valy2->v.val_vec.length
30731 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30732 valx2->v.val_vec.elt_size
30733 * valx2->v.val_vec.length) == 0;
30734 case dw_val_class_const_double:
30735 return valx2->v.val_double.low == valy2->v.val_double.low
30736 && valx2->v.val_double.high == valy2->v.val_double.high;
30737 case dw_val_class_wide_int:
30738 return *valx2->v.val_wide == *valy2->v.val_wide;
30739 case dw_val_class_addr:
30740 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30741 default:
30742 gcc_unreachable ();
30743 }
30744 case DW_OP_bregx:
30745 case DW_OP_bit_piece:
30746 return valx1->v.val_int == valy1->v.val_int
30747 && valx2->v.val_int == valy2->v.val_int;
30748 case DW_OP_addr:
30749 hash_addr:
30750 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30751 case DW_OP_GNU_addr_index:
30752 case DW_OP_addrx:
30753 case DW_OP_GNU_const_index:
30754 case DW_OP_constx:
30755 {
30756 rtx ax1 = valx1->val_entry->addr.rtl;
30757 rtx ay1 = valy1->val_entry->addr.rtl;
30758 return rtx_equal_p (ax1, ay1);
30759 }
30760 case DW_OP_implicit_pointer:
30761 case DW_OP_GNU_implicit_pointer:
30762 return valx1->val_class == dw_val_class_die_ref
30763 && valx1->val_class == valy1->val_class
30764 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30765 && valx2->v.val_int == valy2->v.val_int;
30766 case DW_OP_entry_value:
30767 case DW_OP_GNU_entry_value:
30768 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30769 case DW_OP_const_type:
30770 case DW_OP_GNU_const_type:
30771 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30772 || valx2->val_class != valy2->val_class)
30773 return false;
30774 switch (valx2->val_class)
30775 {
30776 case dw_val_class_const:
30777 return valx2->v.val_int == valy2->v.val_int;
30778 case dw_val_class_vec:
30779 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30780 && valx2->v.val_vec.length == valy2->v.val_vec.length
30781 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30782 valx2->v.val_vec.elt_size
30783 * valx2->v.val_vec.length) == 0;
30784 case dw_val_class_const_double:
30785 return valx2->v.val_double.low == valy2->v.val_double.low
30786 && valx2->v.val_double.high == valy2->v.val_double.high;
30787 case dw_val_class_wide_int:
30788 return *valx2->v.val_wide == *valy2->v.val_wide;
30789 default:
30790 gcc_unreachable ();
30791 }
30792 case DW_OP_regval_type:
30793 case DW_OP_deref_type:
30794 case DW_OP_GNU_regval_type:
30795 case DW_OP_GNU_deref_type:
30796 return valx1->v.val_int == valy1->v.val_int
30797 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30798 case DW_OP_convert:
30799 case DW_OP_reinterpret:
30800 case DW_OP_GNU_convert:
30801 case DW_OP_GNU_reinterpret:
30802 if (valx1->val_class != valy1->val_class)
30803 return false;
30804 if (valx1->val_class == dw_val_class_unsigned_const)
30805 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30806 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30807 case DW_OP_GNU_parameter_ref:
30808 return valx1->val_class == dw_val_class_die_ref
30809 && valx1->val_class == valy1->val_class
30810 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30811 default:
30812 /* Other codes have no operands. */
30813 return true;
30814 }
30815 }
30816
30817 /* Return true if DWARF location expressions X and Y are the same. */
30818
30819 static inline bool
30820 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30821 {
30822 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30823 if (x->dw_loc_opc != y->dw_loc_opc
30824 || x->dtprel != y->dtprel
30825 || !compare_loc_operands (x, y))
30826 break;
30827 return x == NULL && y == NULL;
30828 }
30829
30830 /* Hashtable helpers. */
30831
30832 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30833 {
30834 static inline hashval_t hash (const dw_loc_list_struct *);
30835 static inline bool equal (const dw_loc_list_struct *,
30836 const dw_loc_list_struct *);
30837 };
30838
30839 /* Return precomputed hash of location list X. */
30840
30841 inline hashval_t
30842 loc_list_hasher::hash (const dw_loc_list_struct *x)
30843 {
30844 return x->hash;
30845 }
30846
30847 /* Return true if location lists A and B are the same. */
30848
30849 inline bool
30850 loc_list_hasher::equal (const dw_loc_list_struct *a,
30851 const dw_loc_list_struct *b)
30852 {
30853 if (a == b)
30854 return 1;
30855 if (a->hash != b->hash)
30856 return 0;
30857 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30858 if (strcmp (a->begin, b->begin) != 0
30859 || strcmp (a->end, b->end) != 0
30860 || (a->section == NULL) != (b->section == NULL)
30861 || (a->section && strcmp (a->section, b->section) != 0)
30862 || a->vbegin != b->vbegin || a->vend != b->vend
30863 || !compare_locs (a->expr, b->expr))
30864 break;
30865 return a == NULL && b == NULL;
30866 }
30867
30868 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30869
30870
30871 /* Recursively optimize location lists referenced from DIE
30872 children and share them whenever possible. */
30873
30874 static void
30875 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30876 {
30877 dw_die_ref c;
30878 dw_attr_node *a;
30879 unsigned ix;
30880 dw_loc_list_struct **slot;
30881 bool drop_locviews = false;
30882 bool has_locviews = false;
30883
30884 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30885 if (AT_class (a) == dw_val_class_loc_list)
30886 {
30887 dw_loc_list_ref list = AT_loc_list (a);
30888 /* TODO: perform some optimizations here, before hashing
30889 it and storing it into the hash table. */
30890 hash_loc_list (list);
30891 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30892 if (*slot == NULL)
30893 {
30894 *slot = list;
30895 if (loc_list_has_views (list))
30896 gcc_assert (list->vl_symbol);
30897 else if (list->vl_symbol)
30898 {
30899 drop_locviews = true;
30900 list->vl_symbol = NULL;
30901 }
30902 }
30903 else
30904 {
30905 if (list->vl_symbol && !(*slot)->vl_symbol)
30906 drop_locviews = true;
30907 a->dw_attr_val.v.val_loc_list = *slot;
30908 }
30909 }
30910 else if (AT_class (a) == dw_val_class_view_list)
30911 {
30912 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30913 has_locviews = true;
30914 }
30915
30916
30917 if (drop_locviews && has_locviews)
30918 remove_AT (die, DW_AT_GNU_locviews);
30919
30920 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30921 }
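/* In effect, two DIEs whose attributes carry identical location lists
   (same opcodes, operands, ranges and views) end up pointing at a single
   dw_loc_list_struct after this pass, because the duplicate attribute is
   redirected to the list already stored in the hash table, so the list
   data is emitted only once in the location list section.  */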
30922
30923
30924 /* Recursively assign each location list a unique index into the debug_addr
30925 section. */
30926
30927 static void
30928 index_location_lists (dw_die_ref die)
30929 {
30930 dw_die_ref c;
30931 dw_attr_node *a;
30932 unsigned ix;
30933
30934 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30935 if (AT_class (a) == dw_val_class_loc_list)
30936 {
30937 dw_loc_list_ref list = AT_loc_list (a);
30938 dw_loc_list_ref curr;
30939 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
30940 {
30941 /* Don't index an entry that has already been indexed
30942 or won't be output.  Make sure skip_loc_list_entry doesn't
30943 call size_of_locs, because that might cause a circular
30944 dependency: index_location_lists would require the address
30945 table indexes to be computed, while computing those indexes
30946 requires that no new entries are added to the table through
30947 add_addr_table_entry afterwards.  In the rare case of a
30948 DWARF[234] location expression of 64KB or more, we'll just
30949 waste an unused address table entry for it. */
30950 if (curr->begin_entry != NULL
30951 || skip_loc_list_entry (curr))
30952 continue;
30953
30954 curr->begin_entry
30955 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
30956 }
30957 }
30958
30959 FOR_EACH_CHILD (die, c, index_location_lists (c));
30960 }
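/* The entries created above let split-DWARF location lists refer to the
   begin label of each range by an index into .debug_addr (for DWARF5 via
   forms such as DW_LLE_startx_length, and via DW_OP_addrx/DW_OP_constx
   inside expressions) rather than by a relocated address, which keeps
   address relocations out of the .dwo file.  */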
30961
30962 /* Optimize location lists referenced from DIE
30963 children and share them whenever possible. */
30964
30965 static void
30966 optimize_location_lists (dw_die_ref die)
30967 {
30968 loc_list_hash_type htab (500);
30969 optimize_location_lists_1 (die, &htab);
30970 }
30971 \f
30972 /* Traverse the limbo die list, and add parent/child links. The only
30973 dies without parents that should be here are concrete instances of
30974 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
30975 For concrete instances, we can get the parent die from the abstract
30976 instance. */
30977
30978 static void
30979 flush_limbo_die_list (void)
30980 {
30981 limbo_die_node *node;
30982
30983 /* get_context_die calls force_decl_die, which can put new DIEs on the
30984 limbo list in LTO mode when nested functions are put in a different
30985 partition than that of their parent function. */
30986 while ((node = limbo_die_list))
30987 {
30988 dw_die_ref die = node->die;
30989 limbo_die_list = node->next;
30990
30991 if (die->die_parent == NULL)
30992 {
30993 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
30994
30995 if (origin && origin->die_parent)
30996 add_child_die (origin->die_parent, die);
30997 else if (is_cu_die (die))
30998 ;
30999 else if (seen_error ())
31000 /* It's OK to be confused by errors in the input. */
31001 add_child_die (comp_unit_die (), die);
31002 else
31003 {
31004 /* In certain situations, the lexical block containing a
31005 nested function can be optimized away, which results
31006 in the nested function die being orphaned. Likewise
31007 with the return type of that nested function. Force
31008 this to be a child of the containing function.
31009
31010 It may happen that even the containing function got fully
31011 inlined and optimized out.  In that case we are lost and
31012 attach the DIE to whatever context remains.  This should not
31013 be a big issue, as the function is likely unreachable too. */
31014 gcc_assert (node->created_for);
31015
31016 if (DECL_P (node->created_for))
31017 origin = get_context_die (DECL_CONTEXT (node->created_for));
31018 else if (TYPE_P (node->created_for))
31019 origin = scope_die_for (node->created_for, comp_unit_die ());
31020 else
31021 origin = comp_unit_die ();
31022
31023 add_child_die (origin, die);
31024 }
31025 }
31026 }
31027 }
31028
31029 /* Reset DIEs so we can output them again. */
31030
31031 static void
31032 reset_dies (dw_die_ref die)
31033 {
31034 dw_die_ref c;
31035
31036 /* Remove stuff we re-generate. */
31037 die->die_mark = 0;
31038 die->die_offset = 0;
31039 die->die_abbrev = 0;
31040 remove_AT (die, DW_AT_sibling);
31041
31042 FOR_EACH_CHILD (die, c, reset_dies (c));
31043 }
31044
31045 /* Output stuff that dwarf requires at the end of every file,
31046 and generate the DWARF-2 debugging info. */
31047
31048 static void
31049 dwarf2out_finish (const char *)
31050 {
31051 comdat_type_node *ctnode;
31052 dw_die_ref main_comp_unit_die;
31053 unsigned char checksum[16];
31054 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31055
31056 /* Flush out any latecomers to the limbo party. */
31057 flush_limbo_die_list ();
31058
31059 if (inline_entry_data_table)
31060 gcc_assert (inline_entry_data_table->elements () == 0);
31061
31062 if (flag_checking)
31063 {
31064 verify_die (comp_unit_die ());
31065 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31066 verify_die (node->die);
31067 }
31068
31069 /* We shouldn't have any symbols with delayed asm names for
31070 DIEs generated after early finish. */
31071 gcc_assert (deferred_asm_name == NULL);
31072
31073 gen_remaining_tmpl_value_param_die_attribute ();
31074
31075 if (flag_generate_lto || flag_generate_offload)
31076 {
31077 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31078
31079 /* Prune stuff so that dwarf2out_finish runs successfully
31080 for the fat part of the object. */
31081 reset_dies (comp_unit_die ());
31082 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31083 reset_dies (node->die);
31084
31085 hash_table<comdat_type_hasher> comdat_type_table (100);
31086 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31087 {
31088 comdat_type_node **slot
31089 = comdat_type_table.find_slot (ctnode, INSERT);
31090
31091 /* Don't reset types twice. */
31092 if (*slot != HTAB_EMPTY_ENTRY)
31093 continue;
31094
31095 /* Reset the root type DIE so that the type unit can be
31096 output again for the fat part of the object (the guard
31097 mirrors the DW_AT_stmt_list handling further below). */
31098 if (debug_info_level >= DINFO_LEVEL_TERSE)
31099 reset_dies (ctnode->root_die);
31100
31101 *slot = ctnode;
31102 }
31103
31104 /* Reset the CU DIE symbol so we don't output it twice. */
31105 comp_unit_die ()->die_id.die_symbol = NULL;
31106
31107 /* Remove the macro attribute and DW_AT_stmt_list from the early output. */
31108 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31109 if (have_macinfo)
31110 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31111
31112 /* Remove indirect string decisions. */
31113 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31114 }
31115
31116 #if ENABLE_ASSERT_CHECKING
31117 {
31118 dw_die_ref die = comp_unit_die (), c;
31119 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31120 }
31121 #endif
31122 resolve_addr (comp_unit_die ());
31123 move_marked_base_types ();
31124
31125 /* Initialize sections and labels used for actual assembler output. */
31126 unsigned generation = init_sections_and_labels (false);
31127
31128 /* Traverse the DIE's and add sibling attributes to those DIE's that
31129 have children. */
31130 add_sibling_attributes (comp_unit_die ());
31131 limbo_die_node *node;
31132 for (node = cu_die_list; node; node = node->next)
31133 add_sibling_attributes (node->die);
31134 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31135 add_sibling_attributes (ctnode->root_die);
31136
31137 /* When splitting DWARF info, we put some attributes in the
31138 skeleton compile_unit DIE that remains in the .o, while
31139 most attributes go in the DWO compile_unit_die. */
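/* Typically the skeleton keeps just enough for the debugger to find and
   match the DWO: the dwo file name and compilation directory, the dwo_id,
   the address and ranges table bases and the statement list, while the
   full DIE tree lives in the .dwo sections.  */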
31140 if (dwarf_split_debug_info)
31141 {
31142 limbo_die_node *cu;
31143 main_comp_unit_die = gen_compile_unit_die (NULL);
31144 if (dwarf_version >= 5)
31145 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31146 cu = limbo_die_list;
31147 gcc_assert (cu->die == main_comp_unit_die);
31148 limbo_die_list = limbo_die_list->next;
31149 cu->next = cu_die_list;
31150 cu_die_list = cu;
31151 }
31152 else
31153 main_comp_unit_die = comp_unit_die ();
31154
31155 /* Output a terminator label for the .text section. */
31156 switch_to_section (text_section);
31157 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31158 if (cold_text_section)
31159 {
31160 switch_to_section (cold_text_section);
31161 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31162 }
31163
31164 /* We can only use the low/high_pc attributes if all of the code was
31165 in .text. */
31166 if (!have_multiple_function_sections
31167 || (dwarf_version < 3 && dwarf_strict))
31168 {
31169 /* Don't add if the CU has no associated code. */
31170 if (text_section_used)
31171 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31172 text_end_label, true);
31173 }
31174 else
31175 {
31176 unsigned fde_idx;
31177 dw_fde_ref fde;
31178 bool range_list_added = false;
31179
31180 if (text_section_used)
31181 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31182 text_end_label, &range_list_added, true);
31183 if (cold_text_section_used)
31184 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31185 cold_end_label, &range_list_added, true);
31186
31187 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31188 {
31189 if (DECL_IGNORED_P (fde->decl))
31190 continue;
31191 if (!fde->in_std_section)
31192 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31193 fde->dw_fde_end, &range_list_added,
31194 true);
31195 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31196 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31197 fde->dw_fde_second_end, &range_list_added,
31198 true);
31199 }
31200
31201 if (range_list_added)
31202 {
31203 /* We need to give .debug_loc and .debug_ranges an appropriate
31204 "base address". Use zero so that these addresses become
31205 absolute. Historically, we've emitted the unexpected
31206 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31207 Emit both to give time for other tools to adapt. */
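/* DWARF2-4 range and location list entries are interpreted relative to
   the compilation unit's base address (its DW_AT_low_pc); since the
   begin/end labels collected above are emitted as relocated absolute
   addresses, the base address must be zero for consumers to reconstruct
   the correct ranges.  */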
31208 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31209 if (! dwarf_strict && dwarf_version < 4)
31210 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31211
31212 add_ranges (NULL);
31213 }
31214 }
31215
31216 /* AIX Assembler inserts the length, so adjust the reference to match the
31217 offset expected by debuggers. */
31218 strcpy (dl_section_ref, debug_line_section_label);
31219 if (XCOFF_DEBUGGING_INFO)
31220 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31221
31222 if (debug_info_level >= DINFO_LEVEL_TERSE)
31223 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31224 dl_section_ref);
31225
31226 if (have_macinfo)
31227 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31228 macinfo_section_label);
31229
31230 if (dwarf_split_debug_info)
31231 {
31232 if (have_location_lists)
31233 {
31234 if (dwarf_version >= 5)
31235 add_AT_loclistsptr (comp_unit_die (), DW_AT_loclists_base,
31236 loc_section_label);
31237 /* optimize_location_lists calculates the size of the lists,
31238 so index them first, and assign indices to the entries.
31239 Although optimize_location_lists will remove entries from
31240 the table, it only does so for duplicates, and therefore
31241 only reduces ref_counts to 1. */
31242 index_location_lists (comp_unit_die ());
31243 }
31244
31245 if (addr_index_table != NULL)
31246 {
31247 unsigned int index = 0;
31248 addr_index_table
31249 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31250 (&index);
31251 }
31252 }
31253
31254 loc_list_idx = 0;
31255 if (have_location_lists)
31256 {
31257 optimize_location_lists (comp_unit_die ());
31258 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31259 if (dwarf_version >= 5 && dwarf_split_debug_info)
31260 assign_location_list_indexes (comp_unit_die ());
31261 }
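/* For DWARF5 split units the location list attributes can then be
   emitted as DW_FORM_loclistx values: indexes resolved through the
   offset table that follows the .debug_loclists header, located via
   the DW_AT_loclists_base attribute added earlier.  */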
31262
31263 save_macinfo_strings ();
31264
31265 if (dwarf_split_debug_info)
31266 {
31267 unsigned int index = 0;
31268
31269 /* Add attributes common to skeleton compile_units and
31270 type_units.  Because these attributes include strings, this
31271 must be done before freezing the string table. Top-level
31272 skeleton die attrs are added when the skeleton type unit is
31273 created, so ensure it is created by this point. */
31274 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31275 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31276 }
31277
31278 /* Output all of the compilation units. We put the main one last so that
31279 the offsets are available to output_pubnames. */
31280 for (node = cu_die_list; node; node = node->next)
31281 output_comp_unit (node->die, 0, NULL);
31282
31283 hash_table<comdat_type_hasher> comdat_type_table (100);
31284 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31285 {
31286 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31287
31288 /* Don't output duplicate types. */
31289 if (*slot != HTAB_EMPTY_ENTRY)
31290 continue;
31291
31292 /* Add a pointer to the line table for the main compilation unit
31293 so that the debugger can make sense of DW_AT_decl_file
31294 attributes. */
31295 if (debug_info_level >= DINFO_LEVEL_TERSE)
31296 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31297 (!dwarf_split_debug_info
31298 ? dl_section_ref
31299 : debug_skeleton_line_section_label));
31300
31301 output_comdat_type_unit (ctnode);
31302 *slot = ctnode;
31303 }
31304
31305 if (dwarf_split_debug_info)
31306 {
31307 int mark;
31308 struct md5_ctx ctx;
31309
31310 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31311 index_rnglists ();
31312
31313 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31314 md5_init_ctx (&ctx);
31315 mark = 0;
31316 die_checksum (comp_unit_die (), &ctx, &mark);
31317 unmark_all_dies (comp_unit_die ());
31318 md5_finish_ctx (&ctx, checksum);
31319
31320 if (dwarf_version < 5)
31321 {
31322 /* Use the first 8 bytes of the checksum as the dwo_id,
31323 and add it to both comp-unit DIEs. */
31324 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31325 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31326 }
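/* For DWARF5 the dwo_id is not stored in an attribute; the checksum
   computed above is instead passed down to output_comp_unit and
   output_skeleton_debug_sections below, which emit it as part of the
   DWARF5 skeleton/split unit headers.  */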
31327
31328 /* Add the base offset of the ranges table to the skeleton
31329 comp-unit DIE. */
31330 if (!vec_safe_is_empty (ranges_table))
31331 {
31332 if (dwarf_version >= 5)
31333 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31334 ranges_base_label);
31335 else
31336 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31337 ranges_section_label);
31338 }
31339
31340 switch_to_section (debug_addr_section);
31341 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31342 which GCC uses to implement -gsplit-dwarf as a GNU extension
31343 to DWARF before DWARF5, didn't have a header for .debug_addr
31344 units.  DWARF5 specifies a small header when address tables are used. */
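/* Sketch of the header emitted below for 32-bit DWARF:
     unit_length (4 bytes) = number-of-entries * DWARF2_ADDR_SIZE + 4,
       counting the three fields that follow but not itself,
     version (2 bytes) = 5,
     address_size (1 byte),
     segment_selector_size (1 byte) = 0,
   followed by the table of addresses.  For 64-bit DWARF the length field
   is preceded by the 0xffffffff escape and is 8 bytes wide.  */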
31345 if (dwarf_version >= 5)
31346 {
31347 unsigned int last_idx = 0;
31348 unsigned long addrs_length;
31349
31350 addr_index_table->traverse_noresize
31351 <unsigned int *, count_index_addrs> (&last_idx);
31352 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31353
31354 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31355 dw2_asm_output_data (4, 0xffffffff,
31356 "Escape value for 64-bit DWARF extension");
31357 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31358 "Length of Address Unit");
31359 dw2_asm_output_data (2, 5, "DWARF addr version");
31360 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31361 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31362 }
31363 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31364 output_addr_table ();
31365 }
31366
31367 /* Output the main compilation unit if non-empty or if .debug_macinfo
31368 or .debug_macro will be emitted. */
31369 output_comp_unit (comp_unit_die (), have_macinfo,
31370 dwarf_split_debug_info ? checksum : NULL);
31371
31372 if (dwarf_split_debug_info && info_section_emitted)
31373 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31374
31375 /* Output the abbreviation table. */
31376 if (vec_safe_length (abbrev_die_table) != 1)
31377 {
31378 switch_to_section (debug_abbrev_section);
31379 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31380 output_abbrev_section ();
31381 }
31382
31383 /* Output location list section if necessary. */
31384 if (have_location_lists)
31385 {
31386 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31387 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31388 /* Output the location lists info. */
31389 switch_to_section (debug_loc_section);
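/* For DWARF5 the section gets a .debug_loclists style header:
     unit_length, version (5), address_size, segment_selector_size (0)
   and an offset_entry_count, which is nonzero only for -gsplit-dwarf,
   where it is followed by the offset table filled in by
   output_loclists_offsets below.  */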
31390 if (dwarf_version >= 5)
31391 {
31392 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
31393 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
31394 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31395 dw2_asm_output_data (4, 0xffffffff,
31396 "Initial length escape value indicating "
31397 "64-bit DWARF extension");
31398 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31399 "Length of Location Lists");
31400 ASM_OUTPUT_LABEL (asm_out_file, l1);
31401 output_dwarf_version ();
31402 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31403 dw2_asm_output_data (1, 0, "Segment Size");
31404 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31405 "Offset Entry Count");
31406 }
31407 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31408 if (dwarf_version >= 5 && dwarf_split_debug_info)
31409 {
31410 unsigned int save_loc_list_idx = loc_list_idx;
31411 loc_list_idx = 0;
31412 output_loclists_offsets (comp_unit_die ());
31413 gcc_assert (save_loc_list_idx == loc_list_idx);
31414 }
31415 output_location_lists (comp_unit_die ());
31416 if (dwarf_version >= 5)
31417 ASM_OUTPUT_LABEL (asm_out_file, l2);
31418 }
31419
31420 output_pubtables ();
31421
31422 /* Output the address range information if a CU (.debug_info section)
31423 was emitted. We output an empty table even if we had no functions
31424 to put in it.  This is because the consumer has no way to tell the
31425 difference between an empty table that we omitted and failure to
31426 generate a table that would have contained data. */
31427 if (info_section_emitted)
31428 {
31429 switch_to_section (debug_aranges_section);
31430 output_aranges ();
31431 }
31432
31433 /* Output ranges section if necessary. */
31434 if (!vec_safe_is_empty (ranges_table))
31435 {
31436 if (dwarf_version >= 5)
31437 output_rnglists (generation);
31438 else
31439 output_ranges ();
31440 }
31441
31442 /* Have to end the macro section. */
31443 if (have_macinfo)
31444 {
31445 switch_to_section (debug_macinfo_section);
31446 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31447 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31448 : debug_skeleton_line_section_label, false);
31449 dw2_asm_output_data (1, 0, "End compilation unit");
31450 }
31451
31452 /* Output the source line correspondence table. We must do this
31453 even if there is no line information. Otherwise, on an empty
31454 translation unit, we will generate a present, but empty,
31455 .debug_info section. IRIX 6.5 `nm' will then complain when
31456 examining the file. This is done late so that any filenames
31457 used by the debug_info section are marked as 'used'. */
31458 switch_to_section (debug_line_section);
31459 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31460 if (! output_asm_line_debug_info ())
31461 output_line_info (false);
31462
31463 if (dwarf_split_debug_info && info_section_emitted)
31464 {
31465 switch_to_section (debug_skeleton_line_section);
31466 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31467 output_line_info (true);
31468 }
31469
31470 /* If we emitted any indirect strings, output the string table too. */
31471 if (debug_str_hash || skeleton_debug_str_hash)
31472 output_indirect_strings ();
31473 if (debug_line_str_hash)
31474 {
31475 switch_to_section (debug_line_str_section);
31476 const enum dwarf_form form = DW_FORM_line_strp;
31477 debug_line_str_hash->traverse<enum dwarf_form,
31478 output_indirect_string> (form);
31479 }
31480
31481 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31482 symview_upper_bound = 0;
31483 if (zero_view_p)
31484 bitmap_clear (zero_view_p);
31485 }
31486
31487 /* Returns a hash value for X (which really is a variable_value_struct). */
31488
31489 inline hashval_t
31490 variable_value_hasher::hash (variable_value_struct *x)
31491 {
31492 return (hashval_t) x->decl_id;
31493 }
31494
31495 /* Return nonzero if decl_id of variable_value_struct X is the same as
31496 UID of decl Y. */
31497
31498 inline bool
31499 variable_value_hasher::equal (variable_value_struct *x, tree y)
31500 {
31501 return x->decl_id == DECL_UID (y);
31502 }
31503
31504 /* Helper function for resolve_variable_value, handle
31505 DW_OP_GNU_variable_value in one location expression.
31506 Return true if exprloc has been changed into loclist. */
31507
31508 static bool
31509 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31510 {
31511 dw_loc_descr_ref next;
31512 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31513 {
31514 next = loc->dw_loc_next;
31515 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31516 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31517 continue;
31518
31519 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31520 if (DECL_CONTEXT (decl) != current_function_decl)
31521 continue;
31522
31523 dw_die_ref ref = lookup_decl_die (decl);
31524 if (ref)
31525 {
31526 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31527 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31528 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31529 continue;
31530 }
31531 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31532 if (l == NULL)
31533 continue;
31534 if (l->dw_loc_next)
31535 {
31536 if (AT_class (a) != dw_val_class_loc)
31537 continue;
31538 switch (a->dw_attr)
31539 {
31540 /* The following attributes allow both exprloc and loclist
31541 classes, so we can change the expression into a loclist. */
31542 case DW_AT_location:
31543 case DW_AT_string_length:
31544 case DW_AT_return_addr:
31545 case DW_AT_data_member_location:
31546 case DW_AT_frame_base:
31547 case DW_AT_segment:
31548 case DW_AT_static_link:
31549 case DW_AT_use_location:
31550 case DW_AT_vtable_elem_location:
31551 if (prev)
31552 {
31553 prev->dw_loc_next = NULL;
31554 prepend_loc_descr_to_each (l, AT_loc (a));
31555 }
31556 if (next)
31557 add_loc_descr_to_each (l, next);
31558 a->dw_attr_val.val_class = dw_val_class_loc_list;
31559 a->dw_attr_val.val_entry = NULL;
31560 a->dw_attr_val.v.val_loc_list = l;
31561 have_location_lists = true;
31562 return true;
31563 /* The following attributes allow both exprloc and reference
31564 classes, so if the whole expression is DW_OP_GNU_variable_value
31565 alone we could transform it into a reference. */
31566 case DW_AT_byte_size:
31567 case DW_AT_bit_size:
31568 case DW_AT_lower_bound:
31569 case DW_AT_upper_bound:
31570 case DW_AT_bit_stride:
31571 case DW_AT_count:
31572 case DW_AT_allocated:
31573 case DW_AT_associated:
31574 case DW_AT_byte_stride:
31575 if (prev == NULL && next == NULL)
31576 break;
31577 /* FALLTHRU */
31578 default:
31579 if (dwarf_strict)
31580 continue;
31581 break;
31582 }
31583 /* Create DW_TAG_variable that we can refer to. */
31584 gen_decl_die (decl, NULL_TREE, NULL,
31585 lookup_decl_die (current_function_decl));
31586 ref = lookup_decl_die (decl);
31587 if (ref)
31588 {
31589 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31590 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31591 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31592 }
31593 continue;
31594 }
31595 if (prev)
31596 {
31597 prev->dw_loc_next = l->expr;
31598 add_loc_descr (&prev->dw_loc_next, next);
31599 free_loc_descr (loc, NULL);
31600 next = prev->dw_loc_next;
31601 }
31602 else
31603 {
31604 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31605 add_loc_descr (&loc, next);
31606 next = loc;
31607 }
31608 loc = prev;
31609 }
31610 return false;
31611 }
31612
31613 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31614
31615 static void
31616 resolve_variable_value (dw_die_ref die)
31617 {
31618 dw_attr_node *a;
31619 dw_loc_list_ref loc;
31620 unsigned ix;
31621
31622 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31623 switch (AT_class (a))
31624 {
31625 case dw_val_class_loc:
31626 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31627 break;
31628 /* FALLTHRU */
31629 case dw_val_class_loc_list:
31630 loc = AT_loc_list (a);
31631 gcc_assert (loc);
31632 for (; loc; loc = loc->dw_loc_next)
31633 resolve_variable_value_in_expr (a, loc->expr);
31634 break;
31635 default:
31636 break;
31637 }
31638 }
31639
31640 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31641 temporaries in the current function. */
31642
31643 static void
31644 resolve_variable_values (void)
31645 {
31646 if (!variable_value_hash || !current_function_decl)
31647 return;
31648
31649 struct variable_value_struct *node
31650 = variable_value_hash->find_with_hash (current_function_decl,
31651 DECL_UID (current_function_decl));
31652
31653 if (node == NULL)
31654 return;
31655
31656 unsigned int i;
31657 dw_die_ref die;
31658 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31659 resolve_variable_value (die);
31660 }
31661
31662 /* Helper function for note_variable_value, handle one location
31663 expression. */
31664
31665 static void
31666 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31667 {
31668 for (; loc; loc = loc->dw_loc_next)
31669 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31670 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31671 {
31672 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31673 dw_die_ref ref = lookup_decl_die (decl);
31674 if (! ref && (flag_generate_lto || flag_generate_offload))
31675 {
31676 /* ??? This is somewhat of a hack, because we do not create DIEs
31677 for variables not in BLOCK trees early, but when generating
31678 early LTO output we need the dw_val_class_decl_ref to be
31679 fully resolved.  For fat LTO objects we'd also like to
31680 undo this after LTO dwarf output. */
31681 gcc_assert (DECL_CONTEXT (decl));
31682 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31683 gcc_assert (ctx != NULL);
31684 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31685 ref = lookup_decl_die (decl);
31686 gcc_assert (ref != NULL);
31687 }
31688 if (ref)
31689 {
31690 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31691 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31692 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31693 continue;
31694 }
31695 if (VAR_P (decl)
31696 && DECL_CONTEXT (decl)
31697 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31698 && lookup_decl_die (DECL_CONTEXT (decl)))
31699 {
31700 if (!variable_value_hash)
31701 variable_value_hash
31702 = hash_table<variable_value_hasher>::create_ggc (10);
31703
31704 tree fndecl = DECL_CONTEXT (decl);
31705 struct variable_value_struct *node;
31706 struct variable_value_struct **slot
31707 = variable_value_hash->find_slot_with_hash (fndecl,
31708 DECL_UID (fndecl),
31709 INSERT);
31710 if (*slot == NULL)
31711 {
31712 node = ggc_cleared_alloc<variable_value_struct> ();
31713 node->decl_id = DECL_UID (fndecl);
31714 *slot = node;
31715 }
31716 else
31717 node = *slot;
31718
31719 vec_safe_push (node->dies, die);
31720 }
31721 }
31722 }
31723
31724 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31725 with dw_val_class_decl_ref operand. */
31726
31727 static void
31728 note_variable_value (dw_die_ref die)
31729 {
31730 dw_die_ref c;
31731 dw_attr_node *a;
31732 dw_loc_list_ref loc;
31733 unsigned ix;
31734
31735 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31736 switch (AT_class (a))
31737 {
31738 case dw_val_class_loc_list:
31739 loc = AT_loc_list (a);
31740 gcc_assert (loc);
31741 if (!loc->noted_variable_value)
31742 {
31743 loc->noted_variable_value = 1;
31744 for (; loc; loc = loc->dw_loc_next)
31745 note_variable_value_in_expr (die, loc->expr);
31746 }
31747 break;
31748 case dw_val_class_loc:
31749 note_variable_value_in_expr (die, AT_loc (a));
31750 break;
31751 default:
31752 break;
31753 }
31754
31755 /* Mark children. */
31756 FOR_EACH_CHILD (die, c, note_variable_value (c));
31757 }
31758
31759 /* Perform any cleanups needed after the early debug generation pass
31760 has run. */
31761
31762 static void
31763 dwarf2out_early_finish (const char *filename)
31764 {
31765 set_early_dwarf s;
31766 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31767
31768 /* PCH might result in DW_AT_producer string being restored from the
31769 header compilation, so always fill it with an empty string initially
31770 and overwrite only here. */
31771 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31772 producer_string = gen_producer_string ();
31773 producer->dw_attr_val.v.val_str->refcount--;
31774 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31775
31776 /* Add the name for the main input file now. We delayed this from
31777 dwarf2out_init to avoid complications with PCH. */
31778 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31779 add_comp_dir_attribute (comp_unit_die ());
31780
31781 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31782 DW_AT_comp_dir into the .debug_line_str section. */
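/* Strings whose length (including the terminating NUL) does not exceed
   DWARF_OFFSET_SIZE are left alone below: an inline DW_FORM_string copy
   is no larger than the DW_FORM_line_strp offset that would replace it.  */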
31783 if (!dwarf2out_as_loc_support
31784 && dwarf_version >= 5
31785 && DWARF5_USE_DEBUG_LINE_STR)
31786 {
31787 for (int i = 0; i < 2; i++)
31788 {
31789 dw_attr_node *a = get_AT (comp_unit_die (),
31790 i ? DW_AT_comp_dir : DW_AT_name);
31791 if (a == NULL
31792 || AT_class (a) != dw_val_class_str
31793 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31794 continue;
31795
31796 if (! debug_line_str_hash)
31797 debug_line_str_hash
31798 = hash_table<indirect_string_hasher>::create_ggc (10);
31799
31800 struct indirect_string_node *node
31801 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31802 set_indirect_string (node);
31803 node->form = DW_FORM_line_strp;
31804 a->dw_attr_val.v.val_str->refcount--;
31805 a->dw_attr_val.v.val_str = node;
31806 }
31807 }
31808
31809 /* With LTO early dwarf was really finished at compile-time, so make
31810 sure to adjust the phase after annotating the LTRANS CU DIE. */
31811 if (in_lto_p)
31812 {
31813 early_dwarf_finished = true;
31814 return;
31815 }
31816
31817 /* Walk through the list of incomplete types again, trying once more to
31818 emit full debugging info for them. */
31819 retry_incomplete_types ();
31820
31821 /* The point here is to flush out the limbo list so that it is empty
31822 and we don't need to stream it for LTO. */
31823 flush_limbo_die_list ();
31824
31825 gen_scheduled_generic_parms_dies ();
31826 gen_remaining_tmpl_value_param_die_attribute ();
31827
31828 /* Add DW_AT_linkage_name for all deferred DIEs. */
31829 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31830 {
31831 tree decl = node->created_for;
31832 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31833 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31834 ended up in deferred_asm_name before we knew it was
31835 constant and never written to disk. */
31836 && DECL_ASSEMBLER_NAME (decl))
31837 {
31838 add_linkage_attr (node->die, decl);
31839 move_linkage_attr (node->die);
31840 }
31841 }
31842 deferred_asm_name = NULL;
31843
31844 if (flag_eliminate_unused_debug_types)
31845 prune_unused_types ();
31846
31847 /* Generate separate COMDAT sections for type DIEs. */
31848 if (use_debug_types)
31849 {
31850 break_out_comdat_types (comp_unit_die ());
31851
31852 /* Each new type_unit DIE was added to the limbo die list when created.
31853 Since these have all been added to comdat_type_list, clear the
31854 limbo die list. */
31855 limbo_die_list = NULL;
31856
31857 /* For each new comdat type unit, copy declarations for incomplete
31858 types to make the new unit self-contained (i.e., no direct
31859 references to the main compile unit). */
31860 for (comdat_type_node *ctnode = comdat_type_list;
31861 ctnode != NULL; ctnode = ctnode->next)
31862 copy_decls_for_unworthy_types (ctnode->root_die);
31863 copy_decls_for_unworthy_types (comp_unit_die ());
31864
31865 /* In the process of copying declarations from one unit to another,
31866 we may have left some declarations behind that are no longer
31867 referenced. Prune them. */
31868 prune_unused_types ();
31869 }
31870
31871 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
31872 with dw_val_class_decl_ref operand. */
31873 note_variable_value (comp_unit_die ());
31874 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31875 note_variable_value (node->die);
31876 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31877 ctnode = ctnode->next)
31878 note_variable_value (ctnode->root_die);
31879 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31880 note_variable_value (node->die);
31881
31882 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31883 both the main_cu and all skeleton TUs. Making this call unconditional
31884 would end up either adding a second copy of the AT_pubnames attribute, or
31885 requiring a special case in add_top_level_skeleton_die_attrs. */
31886 if (!dwarf_split_debug_info)
31887 add_AT_pubnames (comp_unit_die ());
31888
31889 /* The early debug phase is now finished. */
31890 early_dwarf_finished = true;
31891
31892 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
31893 if ((!flag_generate_lto && !flag_generate_offload)
31894 /* FIXME: Disable debug info generation for PE-COFF targets since the
31895 copy_lto_debug_sections operation of the simple object support in
31896 libiberty is not implemented for them yet. */
31897 || TARGET_PECOFF)
31898 return;
31899
31900 /* Now that we are going to output for LTO, initialize sections and labels
31901 to the LTO variants.  We don't need a random-seed postfix as other
31902 LTO sections do, since linking the LTO debug sections into one in a
31903 partial link is fine. */
31904 init_sections_and_labels (true);
31905
31906 /* The output below is modeled after dwarf2out_finish with all
31907 location related output removed and some LTO specific changes.
31908 Some refactoring might make both smaller and easier to match up. */
31909
31910 /* Traverse the DIE's and add sibling attributes to those DIE's
31911 that have children. */
31912 add_sibling_attributes (comp_unit_die ());
31913 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31914 add_sibling_attributes (node->die);
31915 for (comdat_type_node *ctnode = comdat_type_list;
31916 ctnode != NULL; ctnode = ctnode->next)
31917 add_sibling_attributes (ctnode->root_die);
31918
31919 /* AIX Assembler inserts the length, so adjust the reference to match the
31920 offset expected by debuggers. */
31921 strcpy (dl_section_ref, debug_line_section_label);
31922 if (XCOFF_DEBUGGING_INFO)
31923 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31924
31925 if (debug_info_level >= DINFO_LEVEL_TERSE)
31926 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
31927
31928 if (have_macinfo)
31929 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31930 macinfo_section_label);
31931
31932 save_macinfo_strings ();
31933
31934 if (dwarf_split_debug_info)
31935 {
31936 unsigned int index = 0;
31937 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31938 }
31939
31940 /* Output all of the compilation units. We put the main one last so that
31941 the offsets are available to output_pubnames. */
31942 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31943 output_comp_unit (node->die, 0, NULL);
31944
31945 hash_table<comdat_type_hasher> comdat_type_table (100);
31946 for (comdat_type_node *ctnode = comdat_type_list;
31947 ctnode != NULL; ctnode = ctnode->next)
31948 {
31949 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31950
31951 /* Don't output duplicate types. */
31952 if (*slot != HTAB_EMPTY_ENTRY)
31953 continue;
31954
31955 /* Add a pointer to the line table for the main compilation unit
31956 so that the debugger can make sense of DW_AT_decl_file
31957 attributes. */
31958 if (debug_info_level >= DINFO_LEVEL_TERSE)
31959 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31960 (!dwarf_split_debug_info
31961 ? debug_line_section_label
31962 : debug_skeleton_line_section_label));
31963
31964 output_comdat_type_unit (ctnode);
31965 *slot = ctnode;
31966 }
31967
31968 /* Stick a unique symbol to the main debuginfo section. */
31969 compute_comp_unit_symbol (comp_unit_die ());
31970
31971 /* Output the main compilation unit. We always need it if only for
31972 the CU symbol. */
31973 output_comp_unit (comp_unit_die (), true, NULL);
31974
31975 /* Output the abbreviation table. */
31976 if (vec_safe_length (abbrev_die_table) != 1)
31977 {
31978 switch_to_section (debug_abbrev_section);
31979 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31980 output_abbrev_section ();
31981 }
31982
31983 /* Have to end the macro section. */
31984 if (have_macinfo)
31985 {
31986 /* We have to save macinfo state if we need to output it again
31987 for the FAT part of the object. */
31988 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
31989 if (flag_fat_lto_objects)
31990 macinfo_table = macinfo_table->copy ();
31991
31992 switch_to_section (debug_macinfo_section);
31993 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31994 output_macinfo (debug_line_section_label, true);
31995 dw2_asm_output_data (1, 0, "End compilation unit");
31996
31997 if (flag_fat_lto_objects)
31998 {
31999 vec_free (macinfo_table);
32000 macinfo_table = saved_macinfo_table;
32001 }
32002 }
32003
32004 /* Emit a skeleton debug_line section. */
32005 switch_to_section (debug_line_section);
32006 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32007 output_line_info (true);
32008
32009 /* If we emitted any indirect strings, output the string table too. */
32010 if (debug_str_hash || skeleton_debug_str_hash)
32011 output_indirect_strings ();
32012
32013 /* Switch back to the text section. */
32014 switch_to_section (text_section);
32015 }
32016
32017 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32018 within the same process. For use by toplev::finalize. */
32019
32020 void
32021 dwarf2out_c_finalize (void)
32022 {
32023 last_var_location_insn = NULL;
32024 cached_next_real_insn = NULL;
32025 used_rtx_array = NULL;
32026 incomplete_types = NULL;
32027 decl_scope_table = NULL;
32028 debug_info_section = NULL;
32029 debug_skeleton_info_section = NULL;
32030 debug_abbrev_section = NULL;
32031 debug_skeleton_abbrev_section = NULL;
32032 debug_aranges_section = NULL;
32033 debug_addr_section = NULL;
32034 debug_macinfo_section = NULL;
32035 debug_line_section = NULL;
32036 debug_skeleton_line_section = NULL;
32037 debug_loc_section = NULL;
32038 debug_pubnames_section = NULL;
32039 debug_pubtypes_section = NULL;
32040 debug_str_section = NULL;
32041 debug_line_str_section = NULL;
32042 debug_str_dwo_section = NULL;
32043 debug_str_offsets_section = NULL;
32044 debug_ranges_section = NULL;
32045 debug_frame_section = NULL;
32046 fde_vec = NULL;
32047 debug_str_hash = NULL;
32048 debug_line_str_hash = NULL;
32049 skeleton_debug_str_hash = NULL;
32050 dw2_string_counter = 0;
32051 have_multiple_function_sections = false;
32052 text_section_used = false;
32053 cold_text_section_used = false;
32054 cold_text_section = NULL;
32055 current_unit_personality = NULL;
32056
32057 early_dwarf = false;
32058 early_dwarf_finished = false;
32059
32060 next_die_offset = 0;
32061 single_comp_unit_die = NULL;
32062 comdat_type_list = NULL;
32063 limbo_die_list = NULL;
32064 file_table = NULL;
32065 decl_die_table = NULL;
32066 common_block_die_table = NULL;
32067 decl_loc_table = NULL;
32068 call_arg_locations = NULL;
32069 call_arg_loc_last = NULL;
32070 call_site_count = -1;
32071 tail_call_site_count = -1;
32072 cached_dw_loc_list_table = NULL;
32073 abbrev_die_table = NULL;
32074 delete dwarf_proc_stack_usage_map;
32075 dwarf_proc_stack_usage_map = NULL;
32076 line_info_label_num = 0;
32077 cur_line_info_table = NULL;
32078 text_section_line_info = NULL;
32079 cold_text_section_line_info = NULL;
32080 separate_line_info = NULL;
32081 info_section_emitted = false;
32082 pubname_table = NULL;
32083 pubtype_table = NULL;
32084 macinfo_table = NULL;
32085 ranges_table = NULL;
32086 ranges_by_label = NULL;
32087 rnglist_idx = 0;
32088 have_location_lists = false;
32089 loclabel_num = 0;
32090 poc_label_num = 0;
32091 last_emitted_file = NULL;
32092 label_num = 0;
32093 tmpl_value_parm_die_table = NULL;
32094 generic_type_instances = NULL;
32095 frame_pointer_fb_offset = 0;
32096 frame_pointer_fb_offset_valid = false;
32097 base_types.release ();
32098 XDELETEVEC (producer_string);
32099 producer_string = NULL;
32100 }
32101
32102 #include "gt-dwarf2out.h"