1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
  47      information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
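/* Editorial illustration (not part of the original source): given a CFI row
   that says "CFA = SP + 16" and records the return address at CFA - 8, a
   consumer conceptually evaluates

     cfa      = (value of SP at this PC) + 16;
     saved_ra = *(address-sized load) (cfa - 8);

   The CFA stays constant across the whole frame even while SP moves, which
   is why register save rules are expressed relative to it.  The register
   name and slot size here are only an example.  */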
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105
106 #ifndef XCOFF_DEBUGGING_INFO
107 #define XCOFF_DEBUGGING_INFO 0
108 #endif
109
110 #ifndef HAVE_XCOFF_DWARF_EXTRAS
111 #define HAVE_XCOFF_DWARF_EXTRAS 0
112 #endif
113
114 #ifdef VMS_DEBUGGING_INFO
115 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
116
117 /* Define this macro to be a nonzero value if the directory specifications
118 which are output in the debug info should end with a separator. */
119 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
120 /* Define this macro to evaluate to a nonzero value if GCC should refrain
121 from generating indirect strings in DWARF2 debug information, for instance
122 if your target is stuck with an old version of GDB that is unable to
123 process them properly or uses VMS Debug. */
124 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
125 #else
126 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
127 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
128 #endif
129
130 /* ??? Poison these here until it can be done generically. They've been
131 totally replaced in this file; make sure it stays that way. */
132 #undef DWARF2_UNWIND_INFO
133 #undef DWARF2_FRAME_INFO
134 #if (GCC_VERSION >= 3000)
135 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
136 #endif
137
138 /* The size of the target's pointer type. */
139 #ifndef PTR_SIZE
140 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
141 #endif
142
143 /* Array of RTXes referenced by the debugging information, which therefore
144 must be kept around forever. */
145 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
146
147 /* A pointer to the base of a list of incomplete types which might be
148 completed at some later time. incomplete_types_list needs to be a
 149    vec<tree, va_gc> * because we want to tell the garbage collector about
150 it. */
151 static GTY(()) vec<tree, va_gc> *incomplete_types;
152
153 /* A pointer to the base of a table of references to declaration
154 scopes. This table is a display which tracks the nesting
155 of declaration scopes at the current scope and containing
156 scopes. This table is used to find the proper place to
157 define type declaration DIE's. */
158 static GTY(()) vec<tree, va_gc> *decl_scope_table;
159
160 /* Pointers to various DWARF2 sections. */
161 static GTY(()) section *debug_info_section;
162 static GTY(()) section *debug_skeleton_info_section;
163 static GTY(()) section *debug_abbrev_section;
164 static GTY(()) section *debug_skeleton_abbrev_section;
165 static GTY(()) section *debug_aranges_section;
166 static GTY(()) section *debug_addr_section;
167 static GTY(()) section *debug_macinfo_section;
168 static const char *debug_macinfo_section_name;
169 static unsigned macinfo_label_base = 1;
170 static GTY(()) section *debug_line_section;
171 static GTY(()) section *debug_skeleton_line_section;
172 static GTY(()) section *debug_loc_section;
173 static GTY(()) section *debug_pubnames_section;
174 static GTY(()) section *debug_pubtypes_section;
175 static GTY(()) section *debug_str_section;
176 static GTY(()) section *debug_line_str_section;
177 static GTY(()) section *debug_str_dwo_section;
178 static GTY(()) section *debug_str_offsets_section;
179 static GTY(()) section *debug_ranges_section;
180 static GTY(()) section *debug_frame_section;
181
182 /* Maximum size (in bytes) of an artificially generated label. */
183 #define MAX_ARTIFICIAL_LABEL_BYTES 40
184
185 /* According to the (draft) DWARF 3 specification, the initial length
186 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
187 bytes are 0xffffffff, followed by the length stored in the next 8
188 bytes.
189
190 However, the SGI/MIPS ABI uses an initial length which is equal to
191 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
192
193 #ifndef DWARF_INITIAL_LENGTH_SIZE
194 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
195 #endif
196
197 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
198 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
199 #endif
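/* Editorial worked example: with 8-byte DWARF offsets, a unit whose
   remaining length is 0x1234 bytes starts with the twelve bytes

     ff ff ff ff  34 12 00 00 00 00 00 00   (little-endian encoding)

   i.e. the 0xffffffff escape followed by the real 64-bit length, matching
   the 12-byte case described above.  With 4-byte offsets the initial
   length is just the four bytes 34 12 00 00.  */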
200
201 /* Round SIZE up to the nearest BOUNDARY. */
202 #define DWARF_ROUND(SIZE,BOUNDARY) \
203 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
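/* For instance, DWARF_ROUND (9, 4) expands to ((9 + 3) / 4) * 4 == 12,
   while a size already on the boundary is unchanged:
   DWARF_ROUND (8, 4) == 8.  (Editorial example.)  */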
204
205 /* CIE identifier. */
206 #if HOST_BITS_PER_WIDE_INT >= 64
207 #define DWARF_CIE_ID \
208 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
209 #else
210 #define DWARF_CIE_ID DW_CIE_ID
211 #endif
212
213
214 /* A vector for a table that contains frame description
215 information for each routine. */
216 #define NOT_INDEXED (-1U)
217 #define NO_INDEX_ASSIGNED (-2U)
218
219 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
220
221 struct GTY((for_user)) indirect_string_node {
222 const char *str;
223 unsigned int refcount;
224 enum dwarf_form form;
225 char *label;
226 unsigned int index;
227 };
228
229 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
230 {
231 typedef const char *compare_type;
232
233 static hashval_t hash (indirect_string_node *);
234 static bool equal (indirect_string_node *, const char *);
235 };
236
237 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
238
239 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
240
241 /* With split_debug_info, both the comp_dir and dwo_name go in the
242 main object file, rather than the dwo, similar to the force_direct
243 parameter elsewhere but with additional complications:
244
245 1) The string is needed in both the main object file and the dwo.
246 That is, the comp_dir and dwo_name will appear in both places.
247
248 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
249 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
250
251 3) GCC chooses the form to use late, depending on the size and
252 reference count.
253
 254    Rather than forcing all the debug string handling functions and
255 callers to deal with these complications, simply use a separate,
256 special-cased string table for any attribute that should go in the
257 main object file. This limits the complexity to just the places
258 that need it. */
259
260 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
261
262 static GTY(()) int dw2_string_counter;
263
264 /* True if the compilation unit places functions in more than one section. */
265 static GTY(()) bool have_multiple_function_sections = false;
266
267 /* Whether the default text and cold text sections have been used at all. */
268 static GTY(()) bool text_section_used = false;
269 static GTY(()) bool cold_text_section_used = false;
270
271 /* The default cold text section. */
272 static GTY(()) section *cold_text_section;
273
274 /* The DIE for C++14 'auto' in a function return type. */
275 static GTY(()) dw_die_ref auto_die;
276
277 /* The DIE for C++14 'decltype(auto)' in a function return type. */
278 static GTY(()) dw_die_ref decltype_auto_die;
279
280 /* Forward declarations for functions defined in this file. */
281
282 static void output_call_frame_info (int);
283 static void dwarf2out_note_section_used (void);
284
285 /* Personality decl of current unit. Used only when assembler does not support
286 personality CFI. */
287 static GTY(()) rtx current_unit_personality;
288
289 /* Whether an eh_frame section is required. */
290 static GTY(()) bool do_eh_frame = false;
291
292 /* .debug_rnglists next index. */
293 static unsigned int rnglist_idx;
294
295 /* Data and reference forms for relocatable data. */
296 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
297 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
298
299 #ifndef DEBUG_FRAME_SECTION
300 #define DEBUG_FRAME_SECTION ".debug_frame"
301 #endif
302
303 #ifndef FUNC_BEGIN_LABEL
304 #define FUNC_BEGIN_LABEL "LFB"
305 #endif
306
307 #ifndef FUNC_END_LABEL
308 #define FUNC_END_LABEL "LFE"
309 #endif
310
311 #ifndef PROLOGUE_END_LABEL
312 #define PROLOGUE_END_LABEL "LPE"
313 #endif
314
315 #ifndef EPILOGUE_BEGIN_LABEL
316 #define EPILOGUE_BEGIN_LABEL "LEB"
317 #endif
318
319 #ifndef FRAME_BEGIN_LABEL
320 #define FRAME_BEGIN_LABEL "Lframe"
321 #endif
322 #define CIE_AFTER_SIZE_LABEL "LSCIE"
323 #define CIE_END_LABEL "LECIE"
324 #define FDE_LABEL "LSFDE"
325 #define FDE_AFTER_SIZE_LABEL "LASFDE"
326 #define FDE_END_LABEL "LEFDE"
327 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
328 #define LINE_NUMBER_END_LABEL "LELT"
329 #define LN_PROLOG_AS_LABEL "LASLTP"
330 #define LN_PROLOG_END_LABEL "LELTP"
331 #define DIE_LABEL_PREFIX "DW"
332 \f
333 /* Match the base name of a file to the base name of a compilation unit. */
334
335 static int
336 matches_main_base (const char *path)
337 {
338 /* Cache the last query. */
339 static const char *last_path = NULL;
340 static int last_match = 0;
341 if (path != last_path)
342 {
343 const char *base;
344 int length = base_of_path (path, &base);
345 last_path = path;
346 last_match = (length == main_input_baselength
347 && memcmp (base, main_input_basename, length) == 0);
348 }
349 return last_match;
350 }
351
352 #ifdef DEBUG_DEBUG_STRUCT
353
354 static int
355 dump_struct_debug (tree type, enum debug_info_usage usage,
356 enum debug_struct_file criterion, int generic,
357 int matches, int result)
358 {
359 /* Find the type name. */
360 tree type_decl = TYPE_STUB_DECL (type);
361 tree t = type_decl;
362 const char *name = 0;
363 if (TREE_CODE (t) == TYPE_DECL)
364 t = DECL_NAME (t);
365 if (t)
366 name = IDENTIFIER_POINTER (t);
367
368 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
369 criterion,
370 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
371 matches ? "bas" : "hdr",
372 generic ? "gen" : "ord",
373 usage == DINFO_USAGE_DFN ? ";" :
374 usage == DINFO_USAGE_DIR_USE ? "." : "*",
375 result,
376 (void*) type_decl, name);
377 return result;
378 }
379 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
380 dump_struct_debug (type, usage, criterion, generic, matches, result)
381
382 #else
383
384 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
385 (result)
386
387 #endif
388
389 /* Get the number of HOST_WIDE_INTs needed to represent the precision
390 of the number. Some constants have a large uniform precision, so
391 we get the precision needed for the actual value of the number. */
392
393 static unsigned int
394 get_full_len (const wide_int &op)
395 {
396 int prec = wi::min_precision (op, UNSIGNED);
397 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
398 / HOST_BITS_PER_WIDE_INT);
399 }
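/* Editorial worked example: with 64-bit HOST_WIDE_INTs, a value whose
   minimum precision is 70 bits needs (70 + 63) / 64 == 2 elements, while
   any value of 64 bits or fewer needs just 1.  */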
400
401 static bool
402 should_emit_struct_debug (tree type, enum debug_info_usage usage)
403 {
404 enum debug_struct_file criterion;
405 tree type_decl;
406 bool generic = lang_hooks.types.generic_p (type);
407
408 if (generic)
409 criterion = debug_struct_generic[usage];
410 else
411 criterion = debug_struct_ordinary[usage];
412
413 if (criterion == DINFO_STRUCT_FILE_NONE)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
415 if (criterion == DINFO_STRUCT_FILE_ANY)
416 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
417
418 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
419
420 if (type_decl != NULL)
421 {
422 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
423 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
424
425 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
426 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
427 }
428
429 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
430 }
431 \f
432 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
433 switch to the data section instead, and write out a synthetic start label
434 for collect2 the first time around. */
435
436 static void
437 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
438 {
439 if (eh_frame_section == 0)
440 {
441 int flags;
442
443 if (EH_TABLES_CAN_BE_READ_ONLY)
444 {
445 int fde_encoding;
446 int per_encoding;
447 int lsda_encoding;
448
449 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
450 /*global=*/0);
451 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
452 /*global=*/1);
453 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
454 /*global=*/0);
455 flags = ((! flag_pic
456 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
457 && (fde_encoding & 0x70) != DW_EH_PE_aligned
458 && (per_encoding & 0x70) != DW_EH_PE_absptr
459 && (per_encoding & 0x70) != DW_EH_PE_aligned
460 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
461 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
462 ? 0 : SECTION_WRITE);
463 }
464 else
465 flags = SECTION_WRITE;
466
467 #ifdef EH_FRAME_SECTION_NAME
468 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
469 #else
470 eh_frame_section = ((flags == SECTION_WRITE)
471 ? data_section : readonly_data_section);
472 #endif /* EH_FRAME_SECTION_NAME */
473 }
474
475 switch_to_section (eh_frame_section);
476
477 #ifdef EH_FRAME_THROUGH_COLLECT2
478 /* We have no special eh_frame section. Emit special labels to guide
479 collect2. */
480 if (!back)
481 {
482 tree label = get_file_function_name ("F");
483 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
484 targetm.asm_out.globalize_label (asm_out_file,
485 IDENTIFIER_POINTER (label));
486 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
487 }
488 #endif
489 }
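/* Editorial note on the encoding test above: the 0x70 mask selects the
   "base" part of a DW_EH_PE encoding.  For the common
   DW_EH_PE_pcrel | DW_EH_PE_sdata4 (0x1b), 0x1b & 0x70 == 0x10 ==
   DW_EH_PE_pcrel, which is neither absptr nor aligned, so even with
   -fpic the section can stay read-only; an absptr encoding would need
   dynamic relocations and therefore forces SECTION_WRITE.  */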
490
491 /* Switch [BACK] to the eh or debug frame table section, depending on
492 FOR_EH. */
493
494 static void
495 switch_to_frame_table_section (int for_eh, bool back)
496 {
497 if (for_eh)
498 switch_to_eh_frame_section (back);
499 else
500 {
501 if (!debug_frame_section)
502 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
503 SECTION_DEBUG, NULL);
504 switch_to_section (debug_frame_section);
505 }
506 }
507
508 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
509
510 enum dw_cfi_oprnd_type
511 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
512 {
513 switch (cfi)
514 {
515 case DW_CFA_nop:
516 case DW_CFA_GNU_window_save:
517 case DW_CFA_remember_state:
518 case DW_CFA_restore_state:
519 return dw_cfi_oprnd_unused;
520
521 case DW_CFA_set_loc:
522 case DW_CFA_advance_loc1:
523 case DW_CFA_advance_loc2:
524 case DW_CFA_advance_loc4:
525 case DW_CFA_MIPS_advance_loc8:
526 return dw_cfi_oprnd_addr;
527
528 case DW_CFA_offset:
529 case DW_CFA_offset_extended:
530 case DW_CFA_def_cfa:
531 case DW_CFA_offset_extended_sf:
532 case DW_CFA_def_cfa_sf:
533 case DW_CFA_restore:
534 case DW_CFA_restore_extended:
535 case DW_CFA_undefined:
536 case DW_CFA_same_value:
537 case DW_CFA_def_cfa_register:
538 case DW_CFA_register:
539 case DW_CFA_expression:
540 case DW_CFA_val_expression:
541 return dw_cfi_oprnd_reg_num;
542
543 case DW_CFA_def_cfa_offset:
544 case DW_CFA_GNU_args_size:
545 case DW_CFA_def_cfa_offset_sf:
546 return dw_cfi_oprnd_offset;
547
548 case DW_CFA_def_cfa_expression:
549 return dw_cfi_oprnd_loc;
550
551 default:
552 gcc_unreachable ();
553 }
554 }
555
556 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
557
558 enum dw_cfi_oprnd_type
559 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
560 {
561 switch (cfi)
562 {
563 case DW_CFA_def_cfa:
564 case DW_CFA_def_cfa_sf:
565 case DW_CFA_offset:
566 case DW_CFA_offset_extended_sf:
567 case DW_CFA_offset_extended:
568 return dw_cfi_oprnd_offset;
569
570 case DW_CFA_register:
571 return dw_cfi_oprnd_reg_num;
572
573 case DW_CFA_expression:
574 case DW_CFA_val_expression:
575 return dw_cfi_oprnd_loc;
576
577 case DW_CFA_def_cfa_expression:
578 return dw_cfi_oprnd_cfa_loc;
579
580 default:
581 return dw_cfi_oprnd_unused;
582 }
583 }
584
585 /* Output one FDE. */
586
587 static void
588 output_fde (dw_fde_ref fde, bool for_eh, bool second,
589 char *section_start_label, int fde_encoding, char *augmentation,
590 bool any_lsda_needed, int lsda_encoding)
591 {
592 const char *begin, *end;
593 static unsigned int j;
594 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
595
596 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
597 /* empty */ 0);
598 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
599 for_eh + j);
600 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
601 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
602 if (!XCOFF_DEBUGGING_INFO || for_eh)
603 {
604 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
605 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
606 " indicating 64-bit DWARF extension");
607 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
608 "FDE Length");
609 }
610 ASM_OUTPUT_LABEL (asm_out_file, l1);
611
612 if (for_eh)
613 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
614 else
615 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
616 debug_frame_section, "FDE CIE offset");
617
618 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
619 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
620
621 if (for_eh)
622 {
623 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
624 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
625 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
626 "FDE initial location");
627 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
628 end, begin, "FDE address range");
629 }
630 else
631 {
632 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
633 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
634 }
635
636 if (augmentation[0])
637 {
638 if (any_lsda_needed)
639 {
640 int size = size_of_encoded_value (lsda_encoding);
641
642 if (lsda_encoding == DW_EH_PE_aligned)
643 {
644 int offset = ( 4 /* Length */
645 + 4 /* CIE offset */
646 + 2 * size_of_encoded_value (fde_encoding)
647 + 1 /* Augmentation size */ );
648 int pad = -offset & (PTR_SIZE - 1);
649
650 size += pad;
651 gcc_assert (size_of_uleb128 (size) == 1);
652 }
653
654 dw2_asm_output_data_uleb128 (size, "Augmentation size");
655
656 if (fde->uses_eh_lsda)
657 {
658 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
659 fde->funcdef_number);
660 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
661 gen_rtx_SYMBOL_REF (Pmode, l1),
662 false,
663 "Language Specific Data Area");
664 }
665 else
666 {
667 if (lsda_encoding == DW_EH_PE_aligned)
668 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
669 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
670 "Language Specific Data Area (none)");
671 }
672 }
673 else
674 dw2_asm_output_data_uleb128 (0, "Augmentation size");
675 }
676
677 /* Loop through the Call Frame Instructions associated with this FDE. */
678 fde->dw_fde_current_label = begin;
679 {
680 size_t from, until, i;
681
682 from = 0;
683 until = vec_safe_length (fde->dw_fde_cfi);
684
685 if (fde->dw_fde_second_begin == NULL)
686 ;
687 else if (!second)
688 until = fde->dw_fde_switch_cfi_index;
689 else
690 from = fde->dw_fde_switch_cfi_index;
691
692 for (i = from; i < until; i++)
693 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
694 }
695
696 /* If we are to emit a ref/link from function bodies to their frame tables,
697 do it now. This is typically performed to make sure that tables
698 associated with functions are dragged with them and not discarded in
699 garbage collecting links. We need to do this on a per function basis to
700 cope with -ffunction-sections. */
701
702 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
703 /* Switch to the function section, emit the ref to the tables, and
704 switch *back* into the table section. */
705 switch_to_section (function_section (fde->decl));
706 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
707 switch_to_frame_table_section (for_eh, true);
708 #endif
709
710 /* Pad the FDE out to an address sized boundary. */
711 ASM_OUTPUT_ALIGN (asm_out_file,
712 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
713 ASM_OUTPUT_LABEL (asm_out_file, l2);
714
715 j += 2;
716 }
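/* Editorial worked example of the DW_EH_PE_aligned padding above: assuming
   a 4-byte FDE encoding and 8-byte pointers, offset = 4 + 4 + 2*4 + 1 = 17
   and pad = -17 & 7 = 7, so the augmentation size becomes 8 + 7 = 15 and
   the LSDA pointer lands at byte 24 of the FDE, an 8-aligned boundary.
   15 still fits in a single uleb128 byte, satisfying the assertion.  */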
717
718 /* Return true if frame description entry FDE is needed for EH. */
719
720 static bool
721 fde_needed_for_eh_p (dw_fde_ref fde)
722 {
723 if (flag_asynchronous_unwind_tables)
724 return true;
725
726 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
727 return true;
728
729 if (fde->uses_eh_lsda)
730 return true;
731
732 /* If exceptions are enabled, we have collected nothrow info. */
733 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
734 return false;
735
736 return true;
737 }
738
739 /* Output the call frame information used to record information
740 that relates to calculating the frame pointer, and records the
741 location of saved registers. */
742
743 static void
744 output_call_frame_info (int for_eh)
745 {
746 unsigned int i;
747 dw_fde_ref fde;
748 dw_cfi_ref cfi;
749 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
750 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
751 bool any_lsda_needed = false;
752 char augmentation[6];
753 int augmentation_size;
754 int fde_encoding = DW_EH_PE_absptr;
755 int per_encoding = DW_EH_PE_absptr;
756 int lsda_encoding = DW_EH_PE_absptr;
757 int return_reg;
758 rtx personality = NULL;
759 int dw_cie_version;
760
761 /* Don't emit a CIE if there won't be any FDEs. */
762 if (!fde_vec)
763 return;
764
765 /* Nothing to do if the assembler's doing it all. */
766 if (dwarf2out_do_cfi_asm ())
767 return;
768
769 /* If we don't have any functions we'll want to unwind out of, don't emit
770 any EH unwind information. If we make FDEs linkonce, we may have to
771 emit an empty label for an FDE that wouldn't otherwise be emitted. We
772 want to avoid having an FDE kept around when the function it refers to
773 is discarded. Example where this matters: a primary function template
774 in C++ requires EH information, an explicit specialization doesn't. */
775 if (for_eh)
776 {
777 bool any_eh_needed = false;
778
779 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
780 {
781 if (fde->uses_eh_lsda)
782 any_eh_needed = any_lsda_needed = true;
783 else if (fde_needed_for_eh_p (fde))
784 any_eh_needed = true;
785 else if (TARGET_USES_WEAK_UNWIND_INFO)
786 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
787 }
788
789 if (!any_eh_needed)
790 return;
791 }
792
793 /* We're going to be generating comments, so turn on app. */
794 if (flag_debug_asm)
795 app_enable ();
796
797 /* Switch to the proper frame section, first time. */
798 switch_to_frame_table_section (for_eh, false);
799
800 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
801 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
802
803 /* Output the CIE. */
804 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
805 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
806 if (!XCOFF_DEBUGGING_INFO || for_eh)
807 {
808 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
809 dw2_asm_output_data (4, 0xffffffff,
810 "Initial length escape value indicating 64-bit DWARF extension");
811 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
812 "Length of Common Information Entry");
813 }
814 ASM_OUTPUT_LABEL (asm_out_file, l1);
815
816 /* Now that the CIE pointer is PC-relative for EH,
817 use 0 to identify the CIE. */
818 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
819 (for_eh ? 0 : DWARF_CIE_ID),
820 "CIE Identifier Tag");
821
822 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
823 use CIE version 1, unless that would produce incorrect results
824 due to overflowing the return register column. */
825 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
826 dw_cie_version = 1;
827 if (return_reg >= 256 || dwarf_version > 2)
828 dw_cie_version = 3;
829 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
830
831 augmentation[0] = 0;
832 augmentation_size = 0;
833
834 personality = current_unit_personality;
835 if (for_eh)
836 {
837 char *p;
838
839 /* Augmentation:
840 z Indicates that a uleb128 is present to size the
841 augmentation section.
842 L Indicates the encoding (and thus presence) of
843 an LSDA pointer in the FDE augmentation.
844 R Indicates a non-default pointer encoding for
845 FDE code pointers.
846 P Indicates the presence of an encoding + language
847 personality routine in the CIE augmentation. */
848
849 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
850 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
851 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
852
853 p = augmentation + 1;
854 if (personality)
855 {
856 *p++ = 'P';
857 augmentation_size += 1 + size_of_encoded_value (per_encoding);
858 assemble_external_libcall (personality);
859 }
860 if (any_lsda_needed)
861 {
862 *p++ = 'L';
863 augmentation_size += 1;
864 }
865 if (fde_encoding != DW_EH_PE_absptr)
866 {
867 *p++ = 'R';
868 augmentation_size += 1;
869 }
870 if (p > augmentation + 1)
871 {
872 augmentation[0] = 'z';
873 *p = '\0';
874 }
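      /* Editorial example: a unit that has a personality routine, uses an
	 LSDA somewhere, and has a non-absptr FDE encoding ends up with the
	 augmentation string "zPLR".  The matching augmentation data emitted
	 below is then the personality encoding byte + pointer (P), the LSDA
	 encoding byte (L) and the FDE pointer encoding byte (R), in that
	 order, preceded by its uleb128 size (z).  */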
875
876 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
877 if (personality && per_encoding == DW_EH_PE_aligned)
878 {
879 int offset = ( 4 /* Length */
880 + 4 /* CIE Id */
881 + 1 /* CIE version */
882 + strlen (augmentation) + 1 /* Augmentation */
883 + size_of_uleb128 (1) /* Code alignment */
884 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
885 + 1 /* RA column */
886 + 1 /* Augmentation size */
887 + 1 /* Personality encoding */ );
888 int pad = -offset & (PTR_SIZE - 1);
889
890 augmentation_size += pad;
891
892 /* Augmentations should be small, so there's scarce need to
893 iterate for a solution. Die if we exceed one uleb128 byte. */
894 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
895 }
896 }
897
898 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
899 if (dw_cie_version >= 4)
900 {
901 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
902 dw2_asm_output_data (1, 0, "CIE Segment Size");
903 }
904 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
905 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
906 "CIE Data Alignment Factor");
907
908 if (dw_cie_version == 1)
909 dw2_asm_output_data (1, return_reg, "CIE RA Column");
910 else
911 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
912
913 if (augmentation[0])
914 {
915 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
916 if (personality)
917 {
918 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
919 eh_data_format_name (per_encoding));
920 dw2_asm_output_encoded_addr_rtx (per_encoding,
921 personality,
922 true, NULL);
923 }
924
925 if (any_lsda_needed)
926 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
927 eh_data_format_name (lsda_encoding));
928
929 if (fde_encoding != DW_EH_PE_absptr)
930 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
931 eh_data_format_name (fde_encoding));
932 }
933
934 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
935 output_cfi (cfi, NULL, for_eh);
936
937 /* Pad the CIE out to an address sized boundary. */
938 ASM_OUTPUT_ALIGN (asm_out_file,
939 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
940 ASM_OUTPUT_LABEL (asm_out_file, l2);
941
942 /* Loop through all of the FDE's. */
943 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
944 {
945 unsigned int k;
946
947 /* Don't emit EH unwind info for leaf functions that don't need it. */
948 if (for_eh && !fde_needed_for_eh_p (fde))
949 continue;
950
951 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
952 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
953 augmentation, any_lsda_needed, lsda_encoding);
954 }
955
956 if (for_eh && targetm.terminate_dw2_eh_frame_info)
957 dw2_asm_output_data (4, 0, "End of Table");
958
959 /* Turn off app to make assembly quicker. */
960 if (flag_debug_asm)
961 app_disable ();
962 }
963
964 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
965
966 static void
967 dwarf2out_do_cfi_startproc (bool second)
968 {
969 int enc;
970 rtx ref;
971
972 fprintf (asm_out_file, "\t.cfi_startproc\n");
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
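/* Editorial sketch of the text this function prints on a typical ELF
   target with GAS (encodings and label names vary by configuration):

	.cfi_startproc
	.cfi_personality 0x9b,DW.ref.__gxx_personality_v0
	.cfi_lsda 0x1b,.LLSDA12

   0x9b is DW_EH_PE_indirect|pcrel|sdata4 and 0x1b is pcrel|sdata4; the
   assembler then builds the CIE/FDE records itself.  */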
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a current personality per function as without CFI
1114 asm, at the point where we emit the CFI data, there is no current
1115 function anymore. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting .cfi_personality directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132   /* Output a label to mark the end of the prologue generated for this
1133 function. */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142    for a function epilogue.  This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155   /* Output a label to mark the beginning of the epilogue generated for this
1156 function. */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 section *sect;
1223 dw_fde_ref fde = cfun->fde;
1224
1225 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1226
1227 if (!in_cold_section_p)
1228 {
1229 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1230 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1231 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1232 }
1233 else
1234 {
1235 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1236 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1237 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1238 }
1239 have_multiple_function_sections = true;
1240
1241 /* There is no need to mark used sections when not debugging. */
1242 if (cold_text_section != NULL)
1243 dwarf2out_note_section_used ();
1244
1245 if (dwarf2out_do_cfi_asm ())
1246 fprintf (asm_out_file, "\t.cfi_endproc\n");
1247
1248 /* Now do the real section switch. */
1249 sect = current_function_section ();
1250 switch_to_section (sect);
1251
1252 fde->second_in_std_section
1253 = (sect == text_section
1254 || (cold_text_section && sect == cold_text_section));
1255
1256 if (dwarf2out_do_cfi_asm ())
1257 dwarf2out_do_cfi_startproc (true);
1258
1259 var_location_switch_text_section ();
1260
1261 if (cold_text_section != NULL)
1262 set_cur_line_info_table (sect);
1263 }
1264 \f
1265 /* And now, the subset of the debugging information support code necessary
1266 for emitting location expressions. */
1267
1268 /* Data about a single source file. */
1269 struct GTY((for_user)) dwarf_file_data {
1270 const char * filename;
1271 int emitted_number;
1272 };
1273
1274 /* Describe an entry into the .debug_addr section. */
1275
1276 enum ate_kind {
1277 ate_kind_rtx,
1278 ate_kind_rtx_dtprel,
1279 ate_kind_label
1280 };
1281
1282 struct GTY((for_user)) addr_table_entry {
1283 enum ate_kind kind;
1284 unsigned int refcount;
1285 unsigned int index;
1286 union addr_table_entry_struct_union
1287 {
1288 rtx GTY ((tag ("0"))) rtl;
1289 char * GTY ((tag ("1"))) label;
1290 }
1291 GTY ((desc ("%1.kind"))) addr;
1292 };
1293
1294 typedef unsigned int var_loc_view;
1295
1296 /* Location lists are ranges + location descriptions for that range,
1297 so you can track variables that are in different places over
1298 their entire life. */
1299 typedef struct GTY(()) dw_loc_list_struct {
1300 dw_loc_list_ref dw_loc_next;
1301 const char *begin; /* Label and addr_entry for start of range */
1302 addr_table_entry *begin_entry;
1303 const char *end; /* Label for end of range */
1304 char *ll_symbol; /* Label for beginning of location list.
1305 Only on head of list. */
1306 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1307 const char *section; /* Section this loclist is relative to */
1308 dw_loc_descr_ref expr;
1309 var_loc_view vbegin, vend;
1310 hashval_t hash;
1311 /* True if all addresses in this and subsequent lists are known to be
1312 resolved. */
1313 bool resolved_addr;
1314 /* True if this list has been replaced by dw_loc_next. */
1315 bool replaced;
1316 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1317 section. */
1318 unsigned char emitted : 1;
1319 /* True if hash field is index rather than hash value. */
1320 unsigned char num_assigned : 1;
1321 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1322 unsigned char offset_emitted : 1;
1323 /* True if note_variable_value_in_expr has been called on it. */
1324 unsigned char noted_variable_value : 1;
1325 /* True if the range should be emitted even if begin and end
1326 are the same. */
1327 bool force;
1328 } dw_loc_list_node;
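/* Editorial example of what one of these lists encodes: a variable that
   lives in a register early in the function and is later spilled might be
   described as

     [.LVL0, .LVL1)  DW_OP_reg3
     [.LVL1, .LFE0)  DW_OP_fbreg -24

   i.e. consecutive address ranges, each paired with the location
   expression valid over that range.  Label names are only illustrative.  */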
1329
1330 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1331 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1332
1333 /* Convert a DWARF stack opcode into its string name. */
1334
1335 static const char *
1336 dwarf_stack_op_name (unsigned int op)
1337 {
1338 const char *name = get_DW_OP_name (op);
1339
1340 if (name != NULL)
1341 return name;
1342
1343 return "OP_<unknown>";
1344 }
1345
1346 /* Return TRUE iff we're to output location view lists as a separate
1347 attribute next to the location lists, as an extension compatible
1348 with DWARF 2 and above. */
1349
1350 static inline bool
1351 dwarf2out_locviews_in_attribute ()
1352 {
1353 return debug_variable_location_views == 1;
1354 }
1355
1356 /* Return TRUE iff we're to output location view lists as part of the
1357 location lists, as proposed for standardization after DWARF 5. */
1358
1359 static inline bool
1360 dwarf2out_locviews_in_loclist ()
1361 {
1362 #ifndef DW_LLE_view_pair
1363 return false;
1364 #else
1365 return debug_variable_location_views == -1;
1366 #endif
1367 }
1368
1369 /* Return a pointer to a newly allocated location description. Location
1370 descriptions are simple expression terms that can be strung
1371 together to form more complicated location (address) descriptions. */
1372
1373 static inline dw_loc_descr_ref
1374 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1375 unsigned HOST_WIDE_INT oprnd2)
1376 {
1377 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1378
1379 descr->dw_loc_opc = op;
1380 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1381 descr->dw_loc_oprnd1.val_entry = NULL;
1382 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1383 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd2.val_entry = NULL;
1385 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1386
1387 return descr;
1388 }
1389
1390 /* Add a location description term to a location description expression. */
1391
1392 static inline void
1393 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1394 {
1395 dw_loc_descr_ref *d;
1396
1397 /* Find the end of the chain. */
1398 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1399 ;
1400
1401 *d = descr;
1402 }
1403
1404 /* Compare two location operands for exact equality. */
1405
1406 static bool
1407 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1408 {
1409 if (a->val_class != b->val_class)
1410 return false;
1411 switch (a->val_class)
1412 {
1413 case dw_val_class_none:
1414 return true;
1415 case dw_val_class_addr:
1416 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1417
1418 case dw_val_class_offset:
1419 case dw_val_class_unsigned_const:
1420 case dw_val_class_const:
1421 case dw_val_class_unsigned_const_implicit:
1422 case dw_val_class_const_implicit:
1423 case dw_val_class_range_list:
1424 /* These are all HOST_WIDE_INT, signed or unsigned. */
1425 return a->v.val_unsigned == b->v.val_unsigned;
1426
1427 case dw_val_class_loc:
1428 return a->v.val_loc == b->v.val_loc;
1429 case dw_val_class_loc_list:
1430 return a->v.val_loc_list == b->v.val_loc_list;
1431 case dw_val_class_view_list:
1432 return a->v.val_view_list == b->v.val_view_list;
1433 case dw_val_class_die_ref:
1434 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1435 case dw_val_class_fde_ref:
1436 return a->v.val_fde_index == b->v.val_fde_index;
1437 case dw_val_class_symview:
1438 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1439 case dw_val_class_lbl_id:
1440 case dw_val_class_lineptr:
1441 case dw_val_class_macptr:
1442 case dw_val_class_loclistsptr:
1443 case dw_val_class_high_pc:
1444 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1445 case dw_val_class_str:
1446 return a->v.val_str == b->v.val_str;
1447 case dw_val_class_flag:
1448 return a->v.val_flag == b->v.val_flag;
1449 case dw_val_class_file:
1450 case dw_val_class_file_implicit:
1451 return a->v.val_file == b->v.val_file;
1452 case dw_val_class_decl_ref:
1453 return a->v.val_decl_ref == b->v.val_decl_ref;
1454
1455 case dw_val_class_const_double:
1456 return (a->v.val_double.high == b->v.val_double.high
1457 && a->v.val_double.low == b->v.val_double.low);
1458
1459 case dw_val_class_wide_int:
1460 return *a->v.val_wide == *b->v.val_wide;
1461
1462 case dw_val_class_vec:
1463 {
1464 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1465 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1466
1467 return (a_len == b_len
1468 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1469 }
1470
1471 case dw_val_class_data8:
1472 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1473
1474 case dw_val_class_vms_delta:
1475 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1476 && !strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1));
1477
1478 case dw_val_class_discr_value:
1479 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1480 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1481 case dw_val_class_discr_list:
1482 /* It makes no sense comparing two discriminant value lists. */
1483 return false;
1484 }
1485 gcc_unreachable ();
1486 }
1487
1488 /* Compare two location atoms for exact equality. */
1489
1490 static bool
1491 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1492 {
1493 if (a->dw_loc_opc != b->dw_loc_opc)
1494 return false;
1495
1496 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1497 address size, but since we always allocate cleared storage it
1498 should be zero for other types of locations. */
1499 if (a->dtprel != b->dtprel)
1500 return false;
1501
1502 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1503 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1504 }
1505
1506 /* Compare two complete location expressions for exact equality. */
1507
1508 bool
1509 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1510 {
1511 while (1)
1512 {
1513 if (a == b)
1514 return true;
1515 if (a == NULL || b == NULL)
1516 return false;
1517 if (!loc_descr_equal_p_1 (a, b))
1518 return false;
1519
1520 a = a->dw_loc_next;
1521 b = b->dw_loc_next;
1522 }
1523 }
1524
1525
1526 /* Add a constant POLY_OFFSET to a location expression. */
1527
1528 static void
1529 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1530 {
1531 dw_loc_descr_ref loc;
1532 HOST_WIDE_INT *p;
1533
1534 gcc_assert (*list_head != NULL);
1535
1536 if (known_eq (poly_offset, 0))
1537 return;
1538
1539 /* Find the end of the chain. */
1540 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1541 ;
1542
1543 HOST_WIDE_INT offset;
1544 if (!poly_offset.is_constant (&offset))
1545 {
1546 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1547 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1548 return;
1549 }
1550
1551 p = NULL;
1552 if (loc->dw_loc_opc == DW_OP_fbreg
1553 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1554 p = &loc->dw_loc_oprnd1.v.val_int;
1555 else if (loc->dw_loc_opc == DW_OP_bregx)
1556 p = &loc->dw_loc_oprnd2.v.val_int;
1557
1558 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1559    offset.  Don't optimize if a signed integer overflow would happen.  */
1560 if (p != NULL
1561 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1562 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1563 *p += offset;
1564
1565 else if (offset > 0)
1566 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1567
1568 else
1569 {
1570 loc->dw_loc_next
1571 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1572 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1573 }
1574 }
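/* Editorial examples of the cases above: adding 16 to an expression ending
   in DW_OP_fbreg -24 simply rewrites it to DW_OP_fbreg -8; adding 16 to an
   expression ending in DW_OP_addr <sym> (no foldable operand) appends
   DW_OP_plus_uconst 16; and adding -16 appends the constant 16 followed by
   DW_OP_minus.  */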
1575
1576 /* Return a pointer to a newly allocated location description for
1577 REG and OFFSET. */
1578
1579 static inline dw_loc_descr_ref
1580 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1581 {
1582 HOST_WIDE_INT const_offset;
1583 if (offset.is_constant (&const_offset))
1584 {
1585 if (reg <= 31)
1586 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1587 const_offset, 0);
1588 else
1589 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1590 }
1591 else
1592 {
1593 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1594 loc_descr_plus_const (&ret, offset);
1595 return ret;
1596 }
1597 }
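/* Editorial examples: new_reg_loc_descr (3, 8) yields DW_OP_breg3 8, while
   a register number above 31, e.g. new_reg_loc_descr (48, 8), yields
   DW_OP_bregx 48, 8.  A non-constant poly_int64 offset instead starts from
   offset 0 and lets loc_descr_plus_const append the remainder.  */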
1598
1599 /* Add a constant OFFSET to a location list. */
1600
1601 static void
1602 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1603 {
1604 dw_loc_list_ref d;
1605 for (d = list_head; d != NULL; d = d->dw_loc_next)
1606 loc_descr_plus_const (&d->expr, offset);
1607 }
1608
1609 #define DWARF_REF_SIZE \
1610 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1611
1612 /* The number of bits that can be encoded by largest DW_FORM_dataN.
1613 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1614 DW_FORM_data16 with 128 bits. */
1615 #define DWARF_LARGEST_DATA_FORM_BITS \
1616 (dwarf_version >= 5 ? 128 : 64)
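/* Editorial example: with -gdwarf-2 a DW_FORM_ref_addr style reference is
   address sized, while for DWARF 3 and later it is offset sized (4 or 8
   bytes); and a 128-bit constant can be emitted as a single
   DW_FORM_data16 only when dwarf_version >= 5.  */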
1617
1618 /* Utility inline function for construction of ops that were GNU extension
1619 before DWARF 5. */
1620 static inline enum dwarf_location_atom
1621 dwarf_OP (enum dwarf_location_atom op)
1622 {
1623 switch (op)
1624 {
1625 case DW_OP_implicit_pointer:
1626 if (dwarf_version < 5)
1627 return DW_OP_GNU_implicit_pointer;
1628 break;
1629
1630 case DW_OP_entry_value:
1631 if (dwarf_version < 5)
1632 return DW_OP_GNU_entry_value;
1633 break;
1634
1635 case DW_OP_const_type:
1636 if (dwarf_version < 5)
1637 return DW_OP_GNU_const_type;
1638 break;
1639
1640 case DW_OP_regval_type:
1641 if (dwarf_version < 5)
1642 return DW_OP_GNU_regval_type;
1643 break;
1644
1645 case DW_OP_deref_type:
1646 if (dwarf_version < 5)
1647 return DW_OP_GNU_deref_type;
1648 break;
1649
1650 case DW_OP_convert:
1651 if (dwarf_version < 5)
1652 return DW_OP_GNU_convert;
1653 break;
1654
1655 case DW_OP_reinterpret:
1656 if (dwarf_version < 5)
1657 return DW_OP_GNU_reinterpret;
1658 break;
1659
1660 case DW_OP_addrx:
1661 if (dwarf_version < 5)
1662 return DW_OP_GNU_addr_index;
1663 break;
1664
1665 case DW_OP_constx:
1666 if (dwarf_version < 5)
1667 return DW_OP_GNU_const_index;
1668 break;
1669
1670 default:
1671 break;
1672 }
1673 return op;
1674 }
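/* Editorial usage example: code that wants an entry-value expression asks
   for dwarf_OP (DW_OP_entry_value); with -gdwarf-4 this returns the
   DW_OP_GNU_entry_value extension opcode, while with -gdwarf-5 the
   standard opcode is used unchanged.  The dwarf_AT, dwarf_TAG and
   dwarf_FORM helpers below follow the same pattern.  */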
1675
1676 /* Similarly for attributes. */
1677 static inline enum dwarf_attribute
1678 dwarf_AT (enum dwarf_attribute at)
1679 {
1680 switch (at)
1681 {
1682 case DW_AT_call_return_pc:
1683 if (dwarf_version < 5)
1684 return DW_AT_low_pc;
1685 break;
1686
1687 case DW_AT_call_tail_call:
1688 if (dwarf_version < 5)
1689 return DW_AT_GNU_tail_call;
1690 break;
1691
1692 case DW_AT_call_origin:
1693 if (dwarf_version < 5)
1694 return DW_AT_abstract_origin;
1695 break;
1696
1697 case DW_AT_call_target:
1698 if (dwarf_version < 5)
1699 return DW_AT_GNU_call_site_target;
1700 break;
1701
1702 case DW_AT_call_target_clobbered:
1703 if (dwarf_version < 5)
1704 return DW_AT_GNU_call_site_target_clobbered;
1705 break;
1706
1707 case DW_AT_call_parameter:
1708 if (dwarf_version < 5)
1709 return DW_AT_abstract_origin;
1710 break;
1711
1712 case DW_AT_call_value:
1713 if (dwarf_version < 5)
1714 return DW_AT_GNU_call_site_value;
1715 break;
1716
1717 case DW_AT_call_data_value:
1718 if (dwarf_version < 5)
1719 return DW_AT_GNU_call_site_data_value;
1720 break;
1721
1722 case DW_AT_call_all_calls:
1723 if (dwarf_version < 5)
1724 return DW_AT_GNU_all_call_sites;
1725 break;
1726
1727 case DW_AT_call_all_tail_calls:
1728 if (dwarf_version < 5)
1729 return DW_AT_GNU_all_tail_call_sites;
1730 break;
1731
1732 case DW_AT_dwo_name:
1733 if (dwarf_version < 5)
1734 return DW_AT_GNU_dwo_name;
1735 break;
1736
1737 case DW_AT_addr_base:
1738 if (dwarf_version < 5)
1739 return DW_AT_GNU_addr_base;
1740 break;
1741
1742 default:
1743 break;
1744 }
1745 return at;
1746 }
1747
1748 /* And similarly for tags. */
1749 static inline enum dwarf_tag
1750 dwarf_TAG (enum dwarf_tag tag)
1751 {
1752 switch (tag)
1753 {
1754 case DW_TAG_call_site:
1755 if (dwarf_version < 5)
1756 return DW_TAG_GNU_call_site;
1757 break;
1758
1759 case DW_TAG_call_site_parameter:
1760 if (dwarf_version < 5)
1761 return DW_TAG_GNU_call_site_parameter;
1762 break;
1763
1764 default:
1765 break;
1766 }
1767 return tag;
1768 }
1769
1770 /* And similarly for forms. */
1771 static inline enum dwarf_form
1772 dwarf_FORM (enum dwarf_form form)
1773 {
1774 switch (form)
1775 {
1776 case DW_FORM_addrx:
1777 if (dwarf_version < 5)
1778 return DW_FORM_GNU_addr_index;
1779 break;
1780
1781 case DW_FORM_strx:
1782 if (dwarf_version < 5)
1783 return DW_FORM_GNU_str_index;
1784 break;
1785
1786 default:
1787 break;
1788 }
1789 return form;
1790 }
1791
1792 static unsigned long int get_base_type_offset (dw_die_ref);
1793
1794 /* Return the size of a location descriptor. */
1795
1796 static unsigned long
1797 size_of_loc_descr (dw_loc_descr_ref loc)
1798 {
1799 unsigned long size = 1;
1800
1801 switch (loc->dw_loc_opc)
1802 {
1803 case DW_OP_addr:
1804 size += DWARF2_ADDR_SIZE;
1805 break;
1806 case DW_OP_GNU_addr_index:
1807 case DW_OP_addrx:
1808 case DW_OP_GNU_const_index:
1809 case DW_OP_constx:
1810 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1811 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1812 break;
1813 case DW_OP_const1u:
1814 case DW_OP_const1s:
1815 size += 1;
1816 break;
1817 case DW_OP_const2u:
1818 case DW_OP_const2s:
1819 size += 2;
1820 break;
1821 case DW_OP_const4u:
1822 case DW_OP_const4s:
1823 size += 4;
1824 break;
1825 case DW_OP_const8u:
1826 case DW_OP_const8s:
1827 size += 8;
1828 break;
1829 case DW_OP_constu:
1830 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1831 break;
1832 case DW_OP_consts:
1833 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1834 break;
1835 case DW_OP_pick:
1836 size += 1;
1837 break;
1838 case DW_OP_plus_uconst:
1839 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1840 break;
1841 case DW_OP_skip:
1842 case DW_OP_bra:
1843 size += 2;
1844 break;
1845 case DW_OP_breg0:
1846 case DW_OP_breg1:
1847 case DW_OP_breg2:
1848 case DW_OP_breg3:
1849 case DW_OP_breg4:
1850 case DW_OP_breg5:
1851 case DW_OP_breg6:
1852 case DW_OP_breg7:
1853 case DW_OP_breg8:
1854 case DW_OP_breg9:
1855 case DW_OP_breg10:
1856 case DW_OP_breg11:
1857 case DW_OP_breg12:
1858 case DW_OP_breg13:
1859 case DW_OP_breg14:
1860 case DW_OP_breg15:
1861 case DW_OP_breg16:
1862 case DW_OP_breg17:
1863 case DW_OP_breg18:
1864 case DW_OP_breg19:
1865 case DW_OP_breg20:
1866 case DW_OP_breg21:
1867 case DW_OP_breg22:
1868 case DW_OP_breg23:
1869 case DW_OP_breg24:
1870 case DW_OP_breg25:
1871 case DW_OP_breg26:
1872 case DW_OP_breg27:
1873 case DW_OP_breg28:
1874 case DW_OP_breg29:
1875 case DW_OP_breg30:
1876 case DW_OP_breg31:
1877 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1878 break;
1879 case DW_OP_regx:
1880 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1881 break;
1882 case DW_OP_fbreg:
1883 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1884 break;
1885 case DW_OP_bregx:
1886 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1887 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1888 break;
1889 case DW_OP_piece:
1890 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1891 break;
1892 case DW_OP_bit_piece:
1893 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1894 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1895 break;
1896 case DW_OP_deref_size:
1897 case DW_OP_xderef_size:
1898 size += 1;
1899 break;
1900 case DW_OP_call2:
1901 size += 2;
1902 break;
1903 case DW_OP_call4:
1904 size += 4;
1905 break;
1906 case DW_OP_call_ref:
1907 case DW_OP_GNU_variable_value:
1908 size += DWARF_REF_SIZE;
1909 break;
1910 case DW_OP_implicit_value:
1911 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1912 + loc->dw_loc_oprnd1.v.val_unsigned;
1913 break;
1914 case DW_OP_implicit_pointer:
1915 case DW_OP_GNU_implicit_pointer:
1916 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1917 break;
1918 case DW_OP_entry_value:
1919 case DW_OP_GNU_entry_value:
1920 {
1921 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1922 size += size_of_uleb128 (op_size) + op_size;
1923 break;
1924 }
1925 case DW_OP_const_type:
1926 case DW_OP_GNU_const_type:
1927 {
1928 unsigned long o
1929 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1930 size += size_of_uleb128 (o) + 1;
1931 switch (loc->dw_loc_oprnd2.val_class)
1932 {
1933 case dw_val_class_vec:
1934 size += loc->dw_loc_oprnd2.v.val_vec.length
1935 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1936 break;
1937 case dw_val_class_const:
1938 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1939 break;
1940 case dw_val_class_const_double:
1941 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1942 break;
1943 case dw_val_class_wide_int:
1944 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1945 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1946 break;
1947 default:
1948 gcc_unreachable ();
1949 }
1950 break;
1951 }
1952 case DW_OP_regval_type:
1953 case DW_OP_GNU_regval_type:
1954 {
1955 unsigned long o
1956 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1957 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1958 + size_of_uleb128 (o);
1959 }
1960 break;
1961 case DW_OP_deref_type:
1962 case DW_OP_GNU_deref_type:
1963 {
1964 unsigned long o
1965 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1966 size += 1 + size_of_uleb128 (o);
1967 }
1968 break;
1969 case DW_OP_convert:
1970 case DW_OP_reinterpret:
1971 case DW_OP_GNU_convert:
1972 case DW_OP_GNU_reinterpret:
1973 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1974 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1975 else
1976 {
1977 unsigned long o
1978 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1979 size += size_of_uleb128 (o);
1980 }
1981 break;
1982 case DW_OP_GNU_parameter_ref:
1983 size += 4;
1984 break;
1985 default:
1986 break;
1987 }
1988
1989 return size;
1990 }
1991
1992 /* Return the size of a series of location descriptors. */
1993
1994 unsigned long
1995 size_of_locs (dw_loc_descr_ref loc)
1996 {
1997 dw_loc_descr_ref l;
1998 unsigned long size;
1999
2000 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2001 field, to avoid writing to a PCH file. */
2002 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2003 {
2004 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2005 break;
2006 size += size_of_loc_descr (l);
2007 }
2008 if (! l)
2009 return size;
2010
2011 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2012 {
2013 l->dw_loc_addr = size;
2014 size += size_of_loc_descr (l);
2015 }
2016
2017 return size;
2018 }
2019
2020 /* Return the size of the value in a DW_AT_discr_value attribute. */
2021
2022 static int
2023 size_of_discr_value (dw_discr_value *discr_value)
2024 {
2025 if (discr_value->pos)
2026 return size_of_uleb128 (discr_value->v.uval);
2027 else
2028 return size_of_sleb128 (discr_value->v.sval);
2029 }
2030
2031 /* Return the size of the value in a DW_AT_discr_list attribute. */
2032
2033 static int
2034 size_of_discr_list (dw_discr_list_ref discr_list)
2035 {
2036 int size = 0;
2037
2038 for (dw_discr_list_ref list = discr_list;
2039 list != NULL;
2040 list = list->dw_discr_next)
2041 {
2042 /* One byte for the discriminant value descriptor, and then one or two
2043 LEB128 numbers, depending on whether it's a single case label or a
2044 range label. */
2045 size += 1;
2046 size += size_of_discr_value (&list->dw_discr_lower_bound);
2047 if (list->dw_discr_range != 0)
2048 size += size_of_discr_value (&list->dw_discr_upper_bound);
2049 }
2050 return size;
2051 }
2052
2053 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2054 static void get_ref_die_offset_label (char *, dw_die_ref);
2055 static unsigned long int get_ref_die_offset (dw_die_ref);
2056
2057 /* Output location description stack opcode's operands (if any).
2058 The for_eh_or_skip parameter controls whether register numbers are
2059 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2060 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2061 info). This should be suppressed for the cases that have not been converted
2062 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2063
2064 static void
2065 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2066 {
2067 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2068 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2069
2070 switch (loc->dw_loc_opc)
2071 {
2072 #ifdef DWARF2_DEBUGGING_INFO
2073 case DW_OP_const2u:
2074 case DW_OP_const2s:
2075 dw2_asm_output_data (2, val1->v.val_int, NULL);
2076 break;
2077 case DW_OP_const4u:
2078 if (loc->dtprel)
2079 {
2080 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2081 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2082 val1->v.val_addr);
2083 fputc ('\n', asm_out_file);
2084 break;
2085 }
2086 /* FALLTHRU */
2087 case DW_OP_const4s:
2088 dw2_asm_output_data (4, val1->v.val_int, NULL);
2089 break;
2090 case DW_OP_const8u:
2091 if (loc->dtprel)
2092 {
2093 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2094 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2095 val1->v.val_addr);
2096 fputc ('\n', asm_out_file);
2097 break;
2098 }
2099 /* FALLTHRU */
2100 case DW_OP_const8s:
2101 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2102 dw2_asm_output_data (8, val1->v.val_int, NULL);
2103 break;
2104 case DW_OP_skip:
2105 case DW_OP_bra:
2106 {
2107 int offset;
2108
2109 gcc_assert (val1->val_class == dw_val_class_loc);
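	/* The 2-byte branch operand is relative to the byte following this
	   operation (1 opcode byte plus 2 operand bytes), hence the + 3.  */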
2110 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2111
2112 dw2_asm_output_data (2, offset, NULL);
2113 }
2114 break;
2115 case DW_OP_implicit_value:
2116 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2117 switch (val2->val_class)
2118 {
2119 case dw_val_class_const:
2120 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2121 break;
2122 case dw_val_class_vec:
2123 {
2124 unsigned int elt_size = val2->v.val_vec.elt_size;
2125 unsigned int len = val2->v.val_vec.length;
2126 unsigned int i;
2127 unsigned char *p;
2128
2129 if (elt_size > sizeof (HOST_WIDE_INT))
2130 {
2131 elt_size /= 2;
2132 len *= 2;
2133 }
2134 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2135 i < len;
2136 i++, p += elt_size)
2137 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2138 "fp or vector constant word %u", i);
2139 }
2140 break;
2141 case dw_val_class_const_double:
2142 {
2143 unsigned HOST_WIDE_INT first, second;
2144
2145 if (WORDS_BIG_ENDIAN)
2146 {
2147 first = val2->v.val_double.high;
2148 second = val2->v.val_double.low;
2149 }
2150 else
2151 {
2152 first = val2->v.val_double.low;
2153 second = val2->v.val_double.high;
2154 }
2155 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2156 first, NULL);
2157 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2158 second, NULL);
2159 }
2160 break;
2161 case dw_val_class_wide_int:
2162 {
2163 int i;
2164 int len = get_full_len (*val2->v.val_wide);
2165 if (WORDS_BIG_ENDIAN)
2166 for (i = len - 1; i >= 0; --i)
2167 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2168 val2->v.val_wide->elt (i), NULL);
2169 else
2170 for (i = 0; i < len; ++i)
2171 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2172 val2->v.val_wide->elt (i), NULL);
2173 }
2174 break;
2175 case dw_val_class_addr:
2176 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2177 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2178 break;
2179 default:
2180 gcc_unreachable ();
2181 }
2182 break;
2183 #else
2184 case DW_OP_const2u:
2185 case DW_OP_const2s:
2186 case DW_OP_const4u:
2187 case DW_OP_const4s:
2188 case DW_OP_const8u:
2189 case DW_OP_const8s:
2190 case DW_OP_skip:
2191 case DW_OP_bra:
2192 case DW_OP_implicit_value:
2193 /* We currently don't make any attempt to make sure these are
2194 aligned properly like we do for the main unwind info, so
2195 don't support emitting things larger than a byte if we're
2196 only doing unwinding. */
2197 gcc_unreachable ();
2198 #endif
2199 case DW_OP_const1u:
2200 case DW_OP_const1s:
2201 dw2_asm_output_data (1, val1->v.val_int, NULL);
2202 break;
2203 case DW_OP_constu:
2204 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2205 break;
2206 case DW_OP_consts:
2207 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2208 break;
2209 case DW_OP_pick:
2210 dw2_asm_output_data (1, val1->v.val_int, NULL);
2211 break;
2212 case DW_OP_plus_uconst:
2213 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2214 break;
2215 case DW_OP_breg0:
2216 case DW_OP_breg1:
2217 case DW_OP_breg2:
2218 case DW_OP_breg3:
2219 case DW_OP_breg4:
2220 case DW_OP_breg5:
2221 case DW_OP_breg6:
2222 case DW_OP_breg7:
2223 case DW_OP_breg8:
2224 case DW_OP_breg9:
2225 case DW_OP_breg10:
2226 case DW_OP_breg11:
2227 case DW_OP_breg12:
2228 case DW_OP_breg13:
2229 case DW_OP_breg14:
2230 case DW_OP_breg15:
2231 case DW_OP_breg16:
2232 case DW_OP_breg17:
2233 case DW_OP_breg18:
2234 case DW_OP_breg19:
2235 case DW_OP_breg20:
2236 case DW_OP_breg21:
2237 case DW_OP_breg22:
2238 case DW_OP_breg23:
2239 case DW_OP_breg24:
2240 case DW_OP_breg25:
2241 case DW_OP_breg26:
2242 case DW_OP_breg27:
2243 case DW_OP_breg28:
2244 case DW_OP_breg29:
2245 case DW_OP_breg30:
2246 case DW_OP_breg31:
2247 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2248 break;
2249 case DW_OP_regx:
2250 {
2251 unsigned r = val1->v.val_unsigned;
2252 if (for_eh_or_skip >= 0)
2253 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2254 gcc_assert (size_of_uleb128 (r)
2255 == size_of_uleb128 (val1->v.val_unsigned));
2256 dw2_asm_output_data_uleb128 (r, NULL);
2257 }
2258 break;
2259 case DW_OP_fbreg:
2260 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2261 break;
2262 case DW_OP_bregx:
2263 {
2264 unsigned r = val1->v.val_unsigned;
2265 if (for_eh_or_skip >= 0)
2266 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2267 gcc_assert (size_of_uleb128 (r)
2268 == size_of_uleb128 (val1->v.val_unsigned));
2269 dw2_asm_output_data_uleb128 (r, NULL);
2270 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2271 }
2272 break;
2273 case DW_OP_piece:
2274 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2275 break;
2276 case DW_OP_bit_piece:
2277 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2278 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2279 break;
2280 case DW_OP_deref_size:
2281 case DW_OP_xderef_size:
2282 dw2_asm_output_data (1, val1->v.val_int, NULL);
2283 break;
2284
2285 case DW_OP_addr:
2286 if (loc->dtprel)
2287 {
2288 if (targetm.asm_out.output_dwarf_dtprel)
2289 {
2290 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2291 DWARF2_ADDR_SIZE,
2292 val1->v.val_addr);
2293 fputc ('\n', asm_out_file);
2294 }
2295 else
2296 gcc_unreachable ();
2297 }
2298 else
2299 {
2300 #ifdef DWARF2_DEBUGGING_INFO
2301 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2302 #else
2303 gcc_unreachable ();
2304 #endif
2305 }
2306 break;
2307
2308 case DW_OP_GNU_addr_index:
2309 case DW_OP_addrx:
2310 case DW_OP_GNU_const_index:
2311 case DW_OP_constx:
2312 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2313 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2314 "(index into .debug_addr)");
2315 break;
2316
2317 case DW_OP_call2:
2318 case DW_OP_call4:
2319 {
2320 unsigned long die_offset
2321 = get_ref_die_offset (val1->v.val_die_ref.die);
2322 /* Make sure the offset has been computed and that we can encode it as
2323 an operand. */
2324 gcc_assert (die_offset > 0
2325 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2326 ? 0xffff
2327 : 0xffffffff));
2328 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2329 die_offset, NULL);
2330 }
2331 break;
2332
2333 case DW_OP_call_ref:
2334 case DW_OP_GNU_variable_value:
2335 {
2336 char label[MAX_ARTIFICIAL_LABEL_BYTES
2337 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2338 gcc_assert (val1->val_class == dw_val_class_die_ref);
2339 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2340 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2341 }
2342 break;
2343
2344 case DW_OP_implicit_pointer:
2345 case DW_OP_GNU_implicit_pointer:
2346 {
2347 char label[MAX_ARTIFICIAL_LABEL_BYTES
2348 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2349 gcc_assert (val1->val_class == dw_val_class_die_ref);
2350 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2351 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2352 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2353 }
2354 break;
2355
2356 case DW_OP_entry_value:
2357 case DW_OP_GNU_entry_value:
2358 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2359 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2360 break;
2361
2362 case DW_OP_const_type:
2363 case DW_OP_GNU_const_type:
2364 {
2365 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2366 gcc_assert (o);
2367 dw2_asm_output_data_uleb128 (o, NULL);
2368 switch (val2->val_class)
2369 {
2370 case dw_val_class_const:
2371 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2372 dw2_asm_output_data (1, l, NULL);
2373 dw2_asm_output_data (l, val2->v.val_int, NULL);
2374 break;
2375 case dw_val_class_vec:
2376 {
2377 unsigned int elt_size = val2->v.val_vec.elt_size;
2378 unsigned int len = val2->v.val_vec.length;
2379 unsigned int i;
2380 unsigned char *p;
2381
2382 l = len * elt_size;
2383 dw2_asm_output_data (1, l, NULL);
2384 if (elt_size > sizeof (HOST_WIDE_INT))
2385 {
2386 elt_size /= 2;
2387 len *= 2;
2388 }
2389 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2390 i < len;
2391 i++, p += elt_size)
2392 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2393 "fp or vector constant word %u", i);
2394 }
2395 break;
2396 case dw_val_class_const_double:
2397 {
2398 unsigned HOST_WIDE_INT first, second;
2399 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2400
2401 dw2_asm_output_data (1, 2 * l, NULL);
2402 if (WORDS_BIG_ENDIAN)
2403 {
2404 first = val2->v.val_double.high;
2405 second = val2->v.val_double.low;
2406 }
2407 else
2408 {
2409 first = val2->v.val_double.low;
2410 second = val2->v.val_double.high;
2411 }
2412 dw2_asm_output_data (l, first, NULL);
2413 dw2_asm_output_data (l, second, NULL);
2414 }
2415 break;
2416 case dw_val_class_wide_int:
2417 {
2418 int i;
2419 int len = get_full_len (*val2->v.val_wide);
2420 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2421
2422 dw2_asm_output_data (1, len * l, NULL);
2423 if (WORDS_BIG_ENDIAN)
2424 for (i = len - 1; i >= 0; --i)
2425 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2426 else
2427 for (i = 0; i < len; ++i)
2428 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2429 }
2430 break;
2431 default:
2432 gcc_unreachable ();
2433 }
2434 }
2435 break;
2436 case DW_OP_regval_type:
2437 case DW_OP_GNU_regval_type:
2438 {
2439 unsigned r = val1->v.val_unsigned;
2440 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2441 gcc_assert (o);
2442 if (for_eh_or_skip >= 0)
2443 {
2444 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2445 gcc_assert (size_of_uleb128 (r)
2446 == size_of_uleb128 (val1->v.val_unsigned));
2447 }
2448 dw2_asm_output_data_uleb128 (r, NULL);
2449 dw2_asm_output_data_uleb128 (o, NULL);
2450 }
2451 break;
2452 case DW_OP_deref_type:
2453 case DW_OP_GNU_deref_type:
2454 {
2455 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2456 gcc_assert (o);
2457 dw2_asm_output_data (1, val1->v.val_int, NULL);
2458 dw2_asm_output_data_uleb128 (o, NULL);
2459 }
2460 break;
2461 case DW_OP_convert:
2462 case DW_OP_reinterpret:
2463 case DW_OP_GNU_convert:
2464 case DW_OP_GNU_reinterpret:
2465 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2466 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2467 else
2468 {
2469 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2470 gcc_assert (o);
2471 dw2_asm_output_data_uleb128 (o, NULL);
2472 }
2473 break;
2474
2475 case DW_OP_GNU_parameter_ref:
2476 {
2477 unsigned long o;
2478 gcc_assert (val1->val_class == dw_val_class_die_ref);
2479 o = get_ref_die_offset (val1->v.val_die_ref.die);
2480 dw2_asm_output_data (4, o, NULL);
2481 }
2482 break;
2483
2484 default:
2485 /* Other codes have no operands. */
2486 break;
2487 }
2488 }
2489
2490 /* Output a sequence of location operations.
2491 The for_eh_or_skip parameter controls whether register numbers are
2492 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2493 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2494 info). This should be suppressed for the cases that have not been converted
2495 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2496
2497 void
2498 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2499 {
2500 for (; loc != NULL; loc = loc->dw_loc_next)
2501 {
2502 enum dwarf_location_atom opc = loc->dw_loc_opc;
2503 /* Output the opcode. */
2504 if (for_eh_or_skip >= 0
2505 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2506 {
2507 unsigned r = (opc - DW_OP_breg0);
2508 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2509 gcc_assert (r <= 31);
2510 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2511 }
2512 else if (for_eh_or_skip >= 0
2513 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2514 {
2515 unsigned r = (opc - DW_OP_reg0);
2516 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2517 gcc_assert (r <= 31);
2518 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2519 }
2520
2521 dw2_asm_output_data (1, opc,
2522 "%s", dwarf_stack_op_name (opc));
2523
2524 /* Output the operand(s) (if any). */
2525 output_loc_operands (loc, for_eh_or_skip);
2526 }
2527 }
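
/* For example (illustrative, not from the original source): a location
   expression consisting of the single operation DW_OP_fbreg -16 is emitted
   as the opcode byte 0x91 followed by the SLEB128 byte 0x70.  */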
2528
2529 /* Output location description stack opcode's operands (if any).
2530 The output is single bytes on a line, suitable for .cfi_escape. */
2531
2532 static void
2533 output_loc_operands_raw (dw_loc_descr_ref loc)
2534 {
2535 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2536 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2537
2538 switch (loc->dw_loc_opc)
2539 {
2540 case DW_OP_addr:
2541 case DW_OP_GNU_addr_index:
2542 case DW_OP_addrx:
2543 case DW_OP_GNU_const_index:
2544 case DW_OP_constx:
2545 case DW_OP_implicit_value:
2546 /* We cannot output addresses in .cfi_escape, only bytes. */
2547 gcc_unreachable ();
2548
2549 case DW_OP_const1u:
2550 case DW_OP_const1s:
2551 case DW_OP_pick:
2552 case DW_OP_deref_size:
2553 case DW_OP_xderef_size:
2554 fputc (',', asm_out_file);
2555 dw2_asm_output_data_raw (1, val1->v.val_int);
2556 break;
2557
2558 case DW_OP_const2u:
2559 case DW_OP_const2s:
2560 fputc (',', asm_out_file);
2561 dw2_asm_output_data_raw (2, val1->v.val_int);
2562 break;
2563
2564 case DW_OP_const4u:
2565 case DW_OP_const4s:
2566 fputc (',', asm_out_file);
2567 dw2_asm_output_data_raw (4, val1->v.val_int);
2568 break;
2569
2570 case DW_OP_const8u:
2571 case DW_OP_const8s:
2572 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2573 fputc (',', asm_out_file);
2574 dw2_asm_output_data_raw (8, val1->v.val_int);
2575 break;
2576
2577 case DW_OP_skip:
2578 case DW_OP_bra:
2579 {
2580 int offset;
2581
2582 gcc_assert (val1->val_class == dw_val_class_loc);
2583 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2584
2585 fputc (',', asm_out_file);
2586 dw2_asm_output_data_raw (2, offset);
2587 }
2588 break;
2589
2590 case DW_OP_regx:
2591 {
2592 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2593 gcc_assert (size_of_uleb128 (r)
2594 == size_of_uleb128 (val1->v.val_unsigned));
2595 fputc (',', asm_out_file);
2596 dw2_asm_output_data_uleb128_raw (r);
2597 }
2598 break;
2599
2600 case DW_OP_constu:
2601 case DW_OP_plus_uconst:
2602 case DW_OP_piece:
2603 fputc (',', asm_out_file);
2604 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2605 break;
2606
2607 case DW_OP_bit_piece:
2608 fputc (',', asm_out_file);
2609 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2610 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2611 break;
2612
2613 case DW_OP_consts:
2614 case DW_OP_breg0:
2615 case DW_OP_breg1:
2616 case DW_OP_breg2:
2617 case DW_OP_breg3:
2618 case DW_OP_breg4:
2619 case DW_OP_breg5:
2620 case DW_OP_breg6:
2621 case DW_OP_breg7:
2622 case DW_OP_breg8:
2623 case DW_OP_breg9:
2624 case DW_OP_breg10:
2625 case DW_OP_breg11:
2626 case DW_OP_breg12:
2627 case DW_OP_breg13:
2628 case DW_OP_breg14:
2629 case DW_OP_breg15:
2630 case DW_OP_breg16:
2631 case DW_OP_breg17:
2632 case DW_OP_breg18:
2633 case DW_OP_breg19:
2634 case DW_OP_breg20:
2635 case DW_OP_breg21:
2636 case DW_OP_breg22:
2637 case DW_OP_breg23:
2638 case DW_OP_breg24:
2639 case DW_OP_breg25:
2640 case DW_OP_breg26:
2641 case DW_OP_breg27:
2642 case DW_OP_breg28:
2643 case DW_OP_breg29:
2644 case DW_OP_breg30:
2645 case DW_OP_breg31:
2646 case DW_OP_fbreg:
2647 fputc (',', asm_out_file);
2648 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2649 break;
2650
2651 case DW_OP_bregx:
2652 {
2653 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2654 gcc_assert (size_of_uleb128 (r)
2655 == size_of_uleb128 (val1->v.val_unsigned));
2656 fputc (',', asm_out_file);
2657 dw2_asm_output_data_uleb128_raw (r);
2658 fputc (',', asm_out_file);
2659 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2660 }
2661 break;
2662
2663 case DW_OP_implicit_pointer:
2664 case DW_OP_entry_value:
2665 case DW_OP_const_type:
2666 case DW_OP_regval_type:
2667 case DW_OP_deref_type:
2668 case DW_OP_convert:
2669 case DW_OP_reinterpret:
2670 case DW_OP_GNU_implicit_pointer:
2671 case DW_OP_GNU_entry_value:
2672 case DW_OP_GNU_const_type:
2673 case DW_OP_GNU_regval_type:
2674 case DW_OP_GNU_deref_type:
2675 case DW_OP_GNU_convert:
2676 case DW_OP_GNU_reinterpret:
2677 case DW_OP_GNU_parameter_ref:
2678 gcc_unreachable ();
2679 break;
2680
2681 default:
2682 /* Other codes have no operands. */
2683 break;
2684 }
2685 }
2686
2687 void
2688 output_loc_sequence_raw (dw_loc_descr_ref loc)
2689 {
2690 while (1)
2691 {
2692 enum dwarf_location_atom opc = loc->dw_loc_opc;
2693 /* Output the opcode. */
2694 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2695 {
2696 unsigned r = (opc - DW_OP_breg0);
2697 r = DWARF2_FRAME_REG_OUT (r, 1);
2698 gcc_assert (r <= 31);
2699 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2700 }
2701 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2702 {
2703 unsigned r = (opc - DW_OP_reg0);
2704 r = DWARF2_FRAME_REG_OUT (r, 1);
2705 gcc_assert (r <= 31);
2706 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2707 }
2708 /* Output the opcode. */
2709 fprintf (asm_out_file, "%#x", opc);
2710 output_loc_operands_raw (loc);
2711
2712 if (!loc->dw_loc_next)
2713 break;
2714 loc = loc->dw_loc_next;
2715
2716 fputc (',', asm_out_file);
2717 }
2718 }
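
/* A minimal sketch of the raw form (assuming the target's
   DWARF2_FRAME_REG_OUT mapping leaves the register number unchanged and
   that the *_raw helpers print comma-separated byte values): the expression
   DW_OP_bregx 36, -8 would appear in a .cfi_escape operand roughly as
   0x92,0x24,0x78.  */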
2719
2720 /* This function builds a dwarf location descriptor sequence from a
2721 dw_cfa_location, adding the given OFFSET to the result of the
2722 expression. */
2723
2724 struct dw_loc_descr_node *
2725 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2726 {
2727 struct dw_loc_descr_node *head, *tmp;
2728
2729 offset += cfa->offset;
2730
2731 if (cfa->indirect)
2732 {
2733 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2734 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2735 head->dw_loc_oprnd1.val_entry = NULL;
2736 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2737 add_loc_descr (&head, tmp);
2738 loc_descr_plus_const (&head, offset);
2739 }
2740 else
2741 head = new_reg_loc_descr (cfa->reg, offset);
2742
2743 return head;
2744 }
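
/* As a sketch (assuming new_reg_loc_descr maps register numbers 0-31 to
   DW_OP_breg0..DW_OP_breg31): a non-indirect CFA of register 7 with offset
   16 and OFFSET 0 yields the single operation DW_OP_breg7 16, while an
   indirect CFA dereferences the computed address with DW_OP_deref before
   the remaining offset is added.  */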
2745
2746 /* This function builds a dwarf location descriptor sequence for
2747 the address at OFFSET from the CFA when stack is aligned to
2748 ALIGNMENT bytes. */
2749
2750 struct dw_loc_descr_node *
2751 build_cfa_aligned_loc (dw_cfa_location *cfa,
2752 poly_int64 offset, HOST_WIDE_INT alignment)
2753 {
2754 struct dw_loc_descr_node *head;
2755 unsigned int dwarf_fp
2756 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2757
2758 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2759 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2760 {
2761 head = new_reg_loc_descr (dwarf_fp, 0);
2762 add_loc_descr (&head, int_loc_descriptor (alignment));
2763 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2764 loc_descr_plus_const (&head, offset);
2765 }
2766 else
2767 head = new_reg_loc_descr (dwarf_fp, offset);
2768 return head;
2769 }
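
/* When stack realignment is emulated above, the result is an expression of
   roughly the shape: <frame pointer register with offset 0>;
   <alignment constant>; DW_OP_and; then the OFFSET addition emitted by
   loc_descr_plus_const.  */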
2770 \f
2771 /* And now, the support for symbolic debugging information. */
2772
2773 /* .debug_str support. */
2774
2775 static void dwarf2out_init (const char *);
2776 static void dwarf2out_finish (const char *);
2777 static void dwarf2out_early_finish (const char *);
2778 static void dwarf2out_assembly_start (void);
2779 static void dwarf2out_define (unsigned int, const char *);
2780 static void dwarf2out_undef (unsigned int, const char *);
2781 static void dwarf2out_start_source_file (unsigned, const char *);
2782 static void dwarf2out_end_source_file (unsigned);
2783 static void dwarf2out_function_decl (tree);
2784 static void dwarf2out_begin_block (unsigned, unsigned);
2785 static void dwarf2out_end_block (unsigned, unsigned);
2786 static bool dwarf2out_ignore_block (const_tree);
2787 static void dwarf2out_early_global_decl (tree);
2788 static void dwarf2out_late_global_decl (tree);
2789 static void dwarf2out_type_decl (tree, int);
2790 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2791 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2792 dw_die_ref);
2793 static void dwarf2out_abstract_function (tree);
2794 static void dwarf2out_var_location (rtx_insn *);
2795 static void dwarf2out_inline_entry (tree);
2796 static void dwarf2out_size_function (tree);
2797 static void dwarf2out_begin_function (tree);
2798 static void dwarf2out_end_function (unsigned int);
2799 static void dwarf2out_register_main_translation_unit (tree unit);
2800 static void dwarf2out_set_name (tree, tree);
2801 static void dwarf2out_register_external_die (tree decl, const char *sym,
2802 unsigned HOST_WIDE_INT off);
2803 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2804 unsigned HOST_WIDE_INT *off);
2805
2806 /* The debug hooks structure. */
2807
2808 const struct gcc_debug_hooks dwarf2_debug_hooks =
2809 {
2810 dwarf2out_init,
2811 dwarf2out_finish,
2812 dwarf2out_early_finish,
2813 dwarf2out_assembly_start,
2814 dwarf2out_define,
2815 dwarf2out_undef,
2816 dwarf2out_start_source_file,
2817 dwarf2out_end_source_file,
2818 dwarf2out_begin_block,
2819 dwarf2out_end_block,
2820 dwarf2out_ignore_block,
2821 dwarf2out_source_line,
2822 dwarf2out_begin_prologue,
2823 #if VMS_DEBUGGING_INFO
2824 dwarf2out_vms_end_prologue,
2825 dwarf2out_vms_begin_epilogue,
2826 #else
2827 debug_nothing_int_charstar,
2828 debug_nothing_int_charstar,
2829 #endif
2830 dwarf2out_end_epilogue,
2831 dwarf2out_begin_function,
2832 dwarf2out_end_function, /* end_function */
2833 dwarf2out_register_main_translation_unit,
2834 dwarf2out_function_decl, /* function_decl */
2835 dwarf2out_early_global_decl,
2836 dwarf2out_late_global_decl,
2837 dwarf2out_type_decl, /* type_decl */
2838 dwarf2out_imported_module_or_decl,
2839 dwarf2out_die_ref_for_decl,
2840 dwarf2out_register_external_die,
2841 debug_nothing_tree, /* deferred_inline_function */
2842 /* The DWARF 2 backend tries to reduce debugging bloat by not
2843 emitting the abstract description of inline functions until
2844 something tries to reference them. */
2845 dwarf2out_abstract_function, /* outlining_inline_function */
2846 debug_nothing_rtx_code_label, /* label */
2847 debug_nothing_int, /* handle_pch */
2848 dwarf2out_var_location,
2849 dwarf2out_inline_entry, /* inline_entry */
2850 dwarf2out_size_function, /* size_function */
2851 dwarf2out_switch_text_section,
2852 dwarf2out_set_name,
2853 1, /* start_end_main_source_file */
2854 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2855 };
2856
2857 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2858 {
2859 dwarf2out_init,
2860 debug_nothing_charstar,
2861 debug_nothing_charstar,
2862 dwarf2out_assembly_start,
2863 debug_nothing_int_charstar,
2864 debug_nothing_int_charstar,
2865 debug_nothing_int_charstar,
2866 debug_nothing_int,
2867 debug_nothing_int_int, /* begin_block */
2868 debug_nothing_int_int, /* end_block */
2869 debug_true_const_tree, /* ignore_block */
2870 dwarf2out_source_line, /* source_line */
2871 debug_nothing_int_int_charstar, /* begin_prologue */
2872 debug_nothing_int_charstar, /* end_prologue */
2873 debug_nothing_int_charstar, /* begin_epilogue */
2874 debug_nothing_int_charstar, /* end_epilogue */
2875 debug_nothing_tree, /* begin_function */
2876 debug_nothing_int, /* end_function */
2877 debug_nothing_tree, /* register_main_translation_unit */
2878 debug_nothing_tree, /* function_decl */
2879 debug_nothing_tree, /* early_global_decl */
2880 debug_nothing_tree, /* late_global_decl */
2881 debug_nothing_tree_int, /* type_decl */
2882 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2883 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2884 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2885 debug_nothing_tree, /* deferred_inline_function */
2886 debug_nothing_tree, /* outlining_inline_function */
2887 debug_nothing_rtx_code_label, /* label */
2888 debug_nothing_int, /* handle_pch */
2889 debug_nothing_rtx_insn, /* var_location */
2890 debug_nothing_tree, /* inline_entry */
2891 debug_nothing_tree, /* size_function */
2892 debug_nothing_void, /* switch_text_section */
2893 debug_nothing_tree_tree, /* set_name */
2894 0, /* start_end_main_source_file */
2895 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2896 };
2897 \f
2898 /* NOTE: In the comments in this file, many references are made to
2899 "Debugging Information Entries". This term is abbreviated as `DIE'
2900 throughout the remainder of this file. */
2901
2902 /* An internal representation of the DWARF output is built, and then
2903 walked to generate the DWARF debugging info. The walk of the internal
2904 representation is done after the entire program has been compiled.
2905 The types below are used to describe the internal representation. */
2906
2907 /* Whether to put type DIEs into their own .debug_types section instead
2908 of making them part of the .debug_info section. This is only supported
2909 for DWARF 4 or higher, and only if the user didn't disable it via
2910 -fno-debug-types-section. It is more efficient to put type DIEs in
2911 separate comdat sections, since the linker can then remove duplicates.
2912 But not all tools support .debug_types sections yet. For DWARF 5 or
2913 higher, .debug_types no longer exists; such units are instead emitted
2914 as DW_UT_type units in the .debug_info section. */
2915
2916 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2917
2918 /* Various DIE's use offsets relative to the beginning of the
2919 .debug_info section to refer to each other. */
2920
2921 typedef long int dw_offset;
2922
2923 struct comdat_type_node;
2924
2925 /* The entries in the line_info table more-or-less mirror the opcodes
2926 that are used in the real dwarf line table. Arrays of these entries
2927 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2928 supported. */
2929
2930 enum dw_line_info_opcode {
2931 /* Emit DW_LNE_set_address; the operand is the label index. */
2932 LI_set_address,
2933
2934 /* Emit a row to the matrix with the given line. This may be done
2935 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2936 special opcodes. */
2937 LI_set_line,
2938
2939 /* Emit a DW_LNS_set_file. */
2940 LI_set_file,
2941
2942 /* Emit a DW_LNS_set_column. */
2943 LI_set_column,
2944
2945 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2946 LI_negate_stmt,
2947
2948 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2949 LI_set_prologue_end,
2950 LI_set_epilogue_begin,
2951
2952 /* Emit a DW_LNE_set_discriminator. */
2953 LI_set_discriminator,
2954
2955 /* Output a Fixed Advance PC; the target PC is the label index; the
2956 base PC is the previous LI_adv_address or LI_set_address entry.
2957 We only use this when emitting debug views without assembler
2958 support, at explicit user request. Ideally, we should only use
2959 it when the offset might be zero but we can't tell: it's the only
2960 way to maybe change the PC without resetting the view number. */
2961 LI_adv_address
2962 };
2963
2964 typedef struct GTY(()) dw_line_info_struct {
2965 enum dw_line_info_opcode opcode;
2966 unsigned int val;
2967 } dw_line_info_entry;
2968
2969
2970 struct GTY(()) dw_line_info_table {
2971 /* The label that marks the end of this section. */
2972 const char *end_label;
2973
2974 /* The values for the last row of the matrix, as collected in the table.
2975 These are used to minimize the changes to the next row. */
2976 unsigned int file_num;
2977 unsigned int line_num;
2978 unsigned int column_num;
2979 int discrim_num;
2980 bool is_stmt;
2981 bool in_use;
2982
2983 /* This denotes the NEXT view number.
2984
2985 If it is 0, it is known that the NEXT view will be the first view
2986 at the given PC.
2987
2988 If it is -1, we're forcing the view number to be reset, e.g. at a
2989 function entry.
2990
2991 The meaning of other nonzero values depends on whether we're
2992 computing views internally or leaving it for the assembler to do
2993 so. If we're emitting them internally, view denotes the view
2994 number since the last known advance of PC. If we're leaving it
2995 for the assembler, it denotes the LVU label number that we're
2996 going to ask the assembler to assign. */
2997 var_loc_view view;
2998
2999 /* This counts the number of symbolic views emitted in this table
3000 since the latest view reset. Its max value, over all tables,
3001 sets symview_upper_bound. */
3002 var_loc_view symviews_since_reset;
3003
3004 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3005 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3006 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3007 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3008
3009 vec<dw_line_info_entry, va_gc> *entries;
3010 };
3011
3012 /* This is an upper bound for view numbers that the assembler may
3013 assign to symbolic views output in this translation unit. It is used to
3014 decide how big a field to use to represent view numbers in
3015 symview-classed attributes. */
3016
3017 static var_loc_view symview_upper_bound;
3018
3019 /* If we're keeping track of location views and their reset points, and
3020 INSN is a reset point (i.e., it necessarily advances the PC), mark
3021 the next view in TABLE as reset. */
3022
3023 static void
3024 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3025 {
3026 if (!debug_internal_reset_location_views)
3027 return;
3028
3029 /* Maybe turn (part of?) this test into a default target hook. */
3030 int reset = 0;
3031
3032 if (targetm.reset_location_view)
3033 reset = targetm.reset_location_view (insn);
3034
3035 if (reset)
3036 ;
3037 else if (JUMP_TABLE_DATA_P (insn))
3038 reset = 1;
3039 else if (GET_CODE (insn) == USE
3040 || GET_CODE (insn) == CLOBBER
3041 || GET_CODE (insn) == ASM_INPUT
3042 || asm_noperands (insn) >= 0)
3043 ;
3044 else if (get_attr_min_length (insn) > 0)
3045 reset = 1;
3046
3047 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3048 RESET_NEXT_VIEW (table->view);
3049 }
3050
3051 /* Each DIE attribute has a field specifying the attribute kind,
3052 a link to the next attribute in the chain, and an attribute value.
3053 Attributes are typically linked below the DIE they modify. */
3054
3055 typedef struct GTY(()) dw_attr_struct {
3056 enum dwarf_attribute dw_attr;
3057 dw_val_node dw_attr_val;
3058 }
3059 dw_attr_node;
3060
3061
3062 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3063 The children of each node form a circular list linked by
3064 die_sib. die_child points to the node *before* the "first" child node. */
3065
3066 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3067 union die_symbol_or_type_node
3068 {
3069 const char * GTY ((tag ("0"))) die_symbol;
3070 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3071 }
3072 GTY ((desc ("%0.comdat_type_p"))) die_id;
3073 vec<dw_attr_node, va_gc> *die_attr;
3074 dw_die_ref die_parent;
3075 dw_die_ref die_child;
3076 dw_die_ref die_sib;
3077 dw_die_ref die_definition; /* ref from a specification to its definition */
3078 dw_offset die_offset;
3079 unsigned long die_abbrev;
3080 int die_mark;
3081 unsigned int decl_id;
3082 enum dwarf_tag die_tag;
3083 /* Die is used and must not be pruned as unused. */
3084 BOOL_BITFIELD die_perennial_p : 1;
3085 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3086 /* Set for an external ref to die_symbol when die_offset contains an extra
3087 offset relative to that symbol. */
3088 BOOL_BITFIELD with_offset : 1;
3089 /* Whether this DIE was removed from the DIE tree, for example via
3090 prune_unused_types. Such DIEs are not considered present by the
3091 DIE lookup routines. */
3092 BOOL_BITFIELD removed : 1;
3093 /* Lots of spare bits. */
3094 }
3095 die_node;
3096
3097 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3098 static bool early_dwarf;
3099 static bool early_dwarf_finished;
3100 struct set_early_dwarf {
3101 bool saved;
3102 set_early_dwarf () : saved(early_dwarf)
3103 {
3104 gcc_assert (! early_dwarf_finished);
3105 early_dwarf = true;
3106 }
3107 ~set_early_dwarf () { early_dwarf = saved; }
3108 };
3109
3110 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3111 #define FOR_EACH_CHILD(die, c, expr) do { \
3112 c = die->die_child; \
3113 if (c) do { \
3114 c = c->die_sib; \
3115 expr; \
3116 } while (c != die->die_child); \
3117 } while (0)
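
/* Hypothetical usage sketch (not from the original source): counting the
   immediate children of DIE:

     int n = 0;
     dw_die_ref c;
     FOR_EACH_CHILD (die, c, n++);

   Because C is advanced to the next sibling before EXPR is evaluated, the
   walk starts at the "first" child and visits each child exactly once.  */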
3118
3119 /* The pubname structure */
3120
3121 typedef struct GTY(()) pubname_struct {
3122 dw_die_ref die;
3123 const char *name;
3124 }
3125 pubname_entry;
3126
3127
3128 struct GTY(()) dw_ranges {
3129 const char *label;
3130 /* If this is positive, it's a block number, otherwise it's a
3131 bitwise-negated index into dw_ranges_by_label. */
3132 int num;
3133 /* Index for the range list for DW_FORM_rnglistx. */
3134 unsigned int idx : 31;
3135 /* True if this range might possibly be in a different section
3136 from the previous entry. */
3137 unsigned int maybe_new_sec : 1;
3138 };
3139
3140 /* A structure to hold a macinfo entry. */
3141
3142 typedef struct GTY(()) macinfo_struct {
3143 unsigned char code;
3144 unsigned HOST_WIDE_INT lineno;
3145 const char *info;
3146 }
3147 macinfo_entry;
3148
3149
3150 struct GTY(()) dw_ranges_by_label {
3151 const char *begin;
3152 const char *end;
3153 };
3154
3155 /* The comdat type node structure. */
3156 struct GTY(()) comdat_type_node
3157 {
3158 dw_die_ref root_die;
3159 dw_die_ref type_die;
3160 dw_die_ref skeleton_die;
3161 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3162 comdat_type_node *next;
3163 };
3164
3165 /* A list of DIEs for which we can't determine ancestry (parent_die
3166 field) just yet. Later in dwarf2out_finish we will fill in the
3167 missing bits. */
3168 typedef struct GTY(()) limbo_die_struct {
3169 dw_die_ref die;
3170 /* The tree for which this DIE was created. We use this to
3171 determine ancestry later. */
3172 tree created_for;
3173 struct limbo_die_struct *next;
3174 }
3175 limbo_die_node;
3176
3177 typedef struct skeleton_chain_struct
3178 {
3179 dw_die_ref old_die;
3180 dw_die_ref new_die;
3181 struct skeleton_chain_struct *parent;
3182 }
3183 skeleton_chain_node;
3184
3185 /* Define a macro which returns nonzero for a TYPE_DECL which was
3186 implicitly generated for a type.
3187
3188 Note that, unlike the C front-end (which generates a NULL named
3189 TYPE_DECL node for each complete tagged type, each array type,
3190 and each function type node created) the C++ front-end generates
3191 a _named_ TYPE_DECL node for each tagged type node created.
3192 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3193 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3194 front-end, but for each type, tagged or not. */
3195
3196 #define TYPE_DECL_IS_STUB(decl) \
3197 (DECL_NAME (decl) == NULL_TREE \
3198 || (DECL_ARTIFICIAL (decl) \
3199 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3200 /* This is necessary for stub decls that \
3201 appear in nested inline functions. */ \
3202 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3203 && (decl_ultimate_origin (decl) \
3204 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3205
3206 /* Information concerning the compilation unit's programming
3207 language, and compiler version. */
3208
3209 /* Fixed size portion of the DWARF compilation unit header. */
3210 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3211 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3212 + (dwarf_version >= 5 ? 4 : 3))
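/* The trailing constant covers the remaining fixed header fields: a 2-byte
   version and a 1-byte address size, plus a 1-byte unit type for DWARF 5;
   DWARF_OFFSET_SIZE accounts for the abbreviation table offset.  */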
3213
3214 /* Fixed size portion of the DWARF comdat type unit header. */
3215 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3216 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3217 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3218
3219 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3220 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3221 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3222
3223 /* Fixed size portion of public names info. */
3224 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3225
3226 /* Fixed size portion of the address range info. */
3227 #define DWARF_ARANGES_HEADER_SIZE \
3228 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3229 DWARF2_ADDR_SIZE * 2) \
3230 - DWARF_INITIAL_LENGTH_SIZE)
3231
3232 /* Size of padding portion in the address range info. It must be
3233 aligned to twice the pointer size. */
3234 #define DWARF_ARANGES_PAD_SIZE \
3235 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3236 DWARF2_ADDR_SIZE * 2) \
3237 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
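
/* Worked example (assuming a typical configuration with 32-bit DWARF, i.e.
   4-byte initial length and offsets, and 8-byte addresses): the 12-byte
   fixed header is rounded up to 16, so DWARF_ARANGES_HEADER_SIZE is 12 and
   DWARF_ARANGES_PAD_SIZE is 4.  */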
3238
3239 /* Use assembler line directives if available. */
3240 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3241 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3242 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3243 #else
3244 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3245 #endif
3246 #endif
3247
3248 /* Use assembler views in line directives if available. */
3249 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3250 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3251 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3252 #else
3253 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3254 #endif
3255 #endif
3256
3257 /* Return true if GCC configure detected assembler support for .loc. */
3258
3259 bool
3260 dwarf2out_default_as_loc_support (void)
3261 {
3262 return DWARF2_ASM_LINE_DEBUG_INFO;
3263 #if (GCC_VERSION >= 3000)
3264 # undef DWARF2_ASM_LINE_DEBUG_INFO
3265 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3266 #endif
3267 }
3268
3269 /* Return true if GCC configure detected assembler support for views
3270 in .loc directives. */
3271
3272 bool
3273 dwarf2out_default_as_locview_support (void)
3274 {
3275 return DWARF2_ASM_VIEW_DEBUG_INFO;
3276 #if (GCC_VERSION >= 3000)
3277 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3278 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3279 #endif
3280 }
3281
3282 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3283 view computation, and it refers to a view identifier for which we
3284 will not emit a label because it is known to map to a view number
3285 zero. We won't allocate the bitmap if we're not using assembler
3286 support for location views, but we have to make the variable
3287 visible for GGC and for code that will be optimized out for lack of
3288 support but that's still parsed and compiled. We could abstract it
3289 out with macros, but it's not worth it. */
3290 static GTY(()) bitmap zero_view_p;
3291
3292 /* Evaluate to TRUE iff N is known to identify the first location view
3293 at its PC. When not using assembler location view computation,
3294 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3295 and view label numbers recorded in it are the ones known to be
3296 zero. */
3297 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3298 || (N) == (var_loc_view)-1 \
3299 || (zero_view_p \
3300 && bitmap_bit_p (zero_view_p, (N))))
3301
3302 /* Return true iff we're to emit .loc directives for the assembler to
3303 generate line number sections.
3304
3305 When we're not emitting views, all we need from the assembler is
3306 support for .loc directives.
3307
3308 If we are emitting views, we can only use the assembler's .loc
3309 support if it also supports views.
3310
3311 When the compiler is emitting the line number programs and
3312 computing view numbers itself, it resets view numbers at known PC
3313 changes and counts from that, and then it emits view numbers as
3314 literal constants in locviewlists. There are cases in which the
3315 compiler is not sure about PC changes, e.g. when extra alignment is
3316 requested for a label. In these cases, the compiler may not reset
3317 the view counter, and the potential PC advance in the line number
3318 program will use an opcode that does not reset the view counter
3319 even if the PC actually changes, so that compiler and debug info
3320 consumer can keep view numbers in sync.
3321
3322 When the compiler defers view computation to the assembler, it
3323 emits symbolic view numbers in locviewlists, with the exception of
3324 views known to be zero (forced resets, or reset after
3325 compiler-visible PC changes): instead of emitting symbols for
3326 these, we emit literal zero and assert the assembler agrees with
3327 the compiler's assessment. We could use symbolic views everywhere,
3328 instead of special-casing zero views, but then we'd be unable to
3329 optimize out locviewlists that contain only zeros. */
3330
3331 static bool
3332 output_asm_line_debug_info (void)
3333 {
3334 return (dwarf2out_as_loc_support
3335 && (dwarf2out_as_locview_support
3336 || !debug_variable_location_views));
3337 }
3338
3339 /* Minimum line offset in a special line info. opcode.
3340 This value was chosen to give a reasonable range of values. */
3341 #define DWARF_LINE_BASE -10
3342
3343 /* First special line opcode - leave room for the standard opcodes. */
3344 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3345
3346 /* Range of line offsets in a special line info. opcode. */
3347 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
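
/* For reference, a special opcode in the line number program encodes a
   combined line and address advance and is computed as
   (line_delta - DWARF_LINE_BASE) + (DWARF_LINE_RANGE * addr_delta)
   + DWARF_LINE_OPCODE_BASE; it is only usable when the result fits in an
   unsigned byte.  */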
3348
3349 /* Flag that indicates the initial value of the is_stmt_start flag.
3350 In the present implementation, we do not mark any lines as
3351 the beginning of a source statement, because that information
3352 is not made available by the GCC front-end. */
3353 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3354
3355 /* Maximum number of operations per instruction bundle. */
3356 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3357 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3358 #endif
3359
3360 /* This location is used by calc_die_sizes() to keep track of
3361 the offset of each DIE within the .debug_info section. */
3362 static unsigned long next_die_offset;
3363
3364 /* Record the root of the DIE's built for the current compilation unit. */
3365 static GTY(()) dw_die_ref single_comp_unit_die;
3366
3367 /* A list of type DIEs that have been separated into comdat sections. */
3368 static GTY(()) comdat_type_node *comdat_type_list;
3369
3370 /* A list of CU DIEs that have been separated. */
3371 static GTY(()) limbo_die_node *cu_die_list;
3372
3373 /* A list of DIEs with a NULL parent waiting to be relocated. */
3374 static GTY(()) limbo_die_node *limbo_die_list;
3375
3376 /* A list of DIEs for which we may have to generate
3377 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3378 static GTY(()) limbo_die_node *deferred_asm_name;
3379
3380 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3381 {
3382 typedef const char *compare_type;
3383
3384 static hashval_t hash (dwarf_file_data *);
3385 static bool equal (dwarf_file_data *, const char *);
3386 };
3387
3388 /* Filenames referenced by this compilation unit. */
3389 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3390
3391 struct decl_die_hasher : ggc_ptr_hash<die_node>
3392 {
3393 typedef tree compare_type;
3394
3395 static hashval_t hash (die_node *);
3396 static bool equal (die_node *, tree);
3397 };
3398 /* A hash table of references to DIE's that describe declarations.
3399 The key is a DECL_UID() which is a unique number identifying each decl. */
3400 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3401
3402 struct GTY ((for_user)) variable_value_struct {
3403 unsigned int decl_id;
3404 vec<dw_die_ref, va_gc> *dies;
3405 };
3406
3407 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3408 {
3409 typedef tree compare_type;
3410
3411 static hashval_t hash (variable_value_struct *);
3412 static bool equal (variable_value_struct *, tree);
3413 };
3414 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3415 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL that is
3416 the DECL_CONTEXT of the referenced VAR_DECLs. */
3417 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3418
3419 struct block_die_hasher : ggc_ptr_hash<die_struct>
3420 {
3421 static hashval_t hash (die_struct *);
3422 static bool equal (die_struct *, die_struct *);
3423 };
3424
3425 /* A hash table of references to DIE's that describe COMMON blocks.
3426 The key is DECL_UID() ^ die_parent. */
3427 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3428
3429 typedef struct GTY(()) die_arg_entry_struct {
3430 dw_die_ref die;
3431 tree arg;
3432 } die_arg_entry;
3433
3434
3435 /* Node of the variable location list. */
3436 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3437 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3438 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3439 in mode of the EXPR_LIST node and first EXPR_LIST operand
3440 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3441 location or NULL for padding. For larger bitsizes,
3442 mode is 0 and first operand is a CONCAT with bitsize
3443 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3444 NULL as second operand. */
3445 rtx GTY (()) loc;
3446 const char * GTY (()) label;
3447 struct var_loc_node * GTY (()) next;
3448 var_loc_view view;
3449 };
3450
3451 /* Variable location list. */
3452 struct GTY ((for_user)) var_loc_list_def {
3453 struct var_loc_node * GTY (()) first;
3454
3455 /* Pointer to the last or last-but-one element of the
3456 chained list. If the list is empty, both first and
3457 last are NULL; if the list contains just one node,
3458 or the last node is certainly not redundant, it points
3459 to the last node; otherwise it points to the last but one.
3460 Do not mark it for GC because it is marked through the chain. */
3461 struct var_loc_node * GTY ((skip ("%h"))) last;
3462
3463 /* Pointer to the last element before a section switch;
3464 if NULL, either sections weren't switched or first
3465 comes after the section switch. */
3466 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3467
3468 /* DECL_UID of the variable decl. */
3469 unsigned int decl_id;
3470 };
3471 typedef struct var_loc_list_def var_loc_list;
3472
3473 /* Call argument location list. */
3474 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3475 rtx GTY (()) call_arg_loc_note;
3476 const char * GTY (()) label;
3477 tree GTY (()) block;
3478 bool tail_call_p;
3479 rtx GTY (()) symbol_ref;
3480 struct call_arg_loc_node * GTY (()) next;
3481 };
3482
3483
3484 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3485 {
3486 typedef const_tree compare_type;
3487
3488 static hashval_t hash (var_loc_list *);
3489 static bool equal (var_loc_list *, const_tree);
3490 };
3491
3492 /* Table of decl location linked lists. */
3493 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3494
3495 /* Head and tail of call_arg_loc chain. */
3496 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3497 static struct call_arg_loc_node *call_arg_loc_last;
3498
3499 /* Number of call sites in the current function. */
3500 static int call_site_count = -1;
3501 /* Number of tail call sites in the current function. */
3502 static int tail_call_site_count = -1;
3503
3504 /* A cached location list. */
3505 struct GTY ((for_user)) cached_dw_loc_list_def {
3506 /* The DECL_UID of the decl that this entry describes. */
3507 unsigned int decl_id;
3508
3509 /* The cached location list. */
3510 dw_loc_list_ref loc_list;
3511 };
3512 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3513
3514 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3515 {
3516
3517 typedef const_tree compare_type;
3518
3519 static hashval_t hash (cached_dw_loc_list *);
3520 static bool equal (cached_dw_loc_list *, const_tree);
3521 };
3522
3523 /* Table of cached location lists. */
3524 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3525
3526 /* A vector of references to DIE's that are uniquely identified by their tag,
3527 presence/absence of children DIE's, and list of attribute/value pairs. */
3528 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3529
3530 /* A hash map to remember the stack usage for DWARF procedures. The value
3531 stored is the stack size difference between before the DWARF procedure
3532 invocation and after it returned. In other words, for a DWARF procedure
3533 that consumes N stack slots and that pushes M ones, this stores M - N. */
3534 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3535
3536 /* A global counter for generating labels for line number data. */
3537 static unsigned int line_info_label_num;
3538
3539 /* The current table to which we should emit line number information
3540 for the current function. This will be set up at the beginning of
3541 assembly for the function. */
3542 static GTY(()) dw_line_info_table *cur_line_info_table;
3543
3544 /* The two default tables of line number info. */
3545 static GTY(()) dw_line_info_table *text_section_line_info;
3546 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3547
3548 /* The set of all non-default tables of line number info. */
3549 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3550
3551 /* A flag telling the pubnames/pubtypes output machinery whether there is
3552 an info section to refer to. */
3553 static bool info_section_emitted;
3554
3555 /* A pointer to the base of a table that contains a list of publicly
3556 accessible names. */
3557 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3558
3559 /* A pointer to the base of a table that contains a list of publicly
3560 accessible types. */
3561 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3562
3563 /* A pointer to the base of a table that contains a list of macro
3564 defines/undefines (and file start/end markers). */
3565 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3566
3567 /* True if a .debug_macinfo or .debug_macro section is going to be
3568 emitted. */
3569 #define have_macinfo \
3570 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3571 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3572 && !macinfo_table->is_empty ())
3573
3574 /* Vector of dies for which we should generate .debug_ranges info. */
3575 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3576
3577 /* Vector of pairs of labels referenced in ranges_table. */
3578 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3579
3580 /* Whether we have location lists that need outputting. */
3581 static GTY(()) bool have_location_lists;
3582
3583 /* Unique label counter. */
3584 static GTY(()) unsigned int loclabel_num;
3585
3586 /* Unique label counter for point-of-call tables. */
3587 static GTY(()) unsigned int poc_label_num;
3588
3589 /* The last file entry emitted by maybe_emit_file(). */
3590 static GTY(()) struct dwarf_file_data * last_emitted_file;
3591
3592 /* Number of internal labels generated by gen_internal_sym(). */
3593 static GTY(()) int label_num;
3594
3595 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3596
3597 /* Instances of generic types for which we need to generate debug
3598 info that describes their generic parameters and arguments. That
3599 generation needs to happen once all types are properly laid out so
3600 we do it at the end of compilation. */
3601 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3602
3603 /* Offset from the "steady-state frame pointer" to the frame base,
3604 within the current function. */
3605 static poly_int64 frame_pointer_fb_offset;
3606 static bool frame_pointer_fb_offset_valid;
3607
3608 static vec<dw_die_ref> base_types;
3609
3610 /* Flags to represent a set of attribute classes for attributes that represent
3611 a scalar value (bounds, pointers, ...). */
3612 enum dw_scalar_form
3613 {
3614 dw_scalar_form_constant = 0x01,
3615 dw_scalar_form_exprloc = 0x02,
3616 dw_scalar_form_reference = 0x04
3617 };
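/* Note, added for exposition: these enumerators are distinct bits, so callers
   such as add_scalar_info can OR them together to say which DWARF
   representations are acceptable for a given scalar value: a constant form,
   a DWARF expression (exprloc), and/or a reference to another DIE that
   computes the value.  */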
3618
3619 /* Forward declarations for functions defined in this file. */
3620
3621 static int is_pseudo_reg (const_rtx);
3622 static tree type_main_variant (tree);
3623 static int is_tagged_type (const_tree);
3624 static const char *dwarf_tag_name (unsigned);
3625 static const char *dwarf_attr_name (unsigned);
3626 static const char *dwarf_form_name (unsigned);
3627 static tree decl_ultimate_origin (const_tree);
3628 static tree decl_class_context (tree);
3629 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3630 static inline enum dw_val_class AT_class (dw_attr_node *);
3631 static inline unsigned int AT_index (dw_attr_node *);
3632 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3633 static inline unsigned AT_flag (dw_attr_node *);
3634 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3635 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3636 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3637 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3638 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3639 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3640 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3641 unsigned int, unsigned char *);
3642 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3643 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3644 static inline const char *AT_string (dw_attr_node *);
3645 static enum dwarf_form AT_string_form (dw_attr_node *);
3646 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3647 static void add_AT_specification (dw_die_ref, dw_die_ref);
3648 static inline dw_die_ref AT_ref (dw_attr_node *);
3649 static inline int AT_ref_external (dw_attr_node *);
3650 static inline void set_AT_ref_external (dw_attr_node *, int);
3651 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3652 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3653 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3654 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3655 dw_loc_list_ref);
3656 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3657 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3658 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3659 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3660 static void remove_addr_table_entry (addr_table_entry *);
3661 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3662 static inline rtx AT_addr (dw_attr_node *);
3663 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3664 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3665 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3666 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3667 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3668 const char *);
3669 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3670 unsigned HOST_WIDE_INT);
3671 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3672 unsigned long, bool);
3673 static inline const char *AT_lbl (dw_attr_node *);
3674 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3675 static const char *get_AT_low_pc (dw_die_ref);
3676 static const char *get_AT_hi_pc (dw_die_ref);
3677 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3678 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3679 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3680 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3681 static bool is_cxx (void);
3682 static bool is_cxx (const_tree);
3683 static bool is_fortran (void);
3684 static bool is_ada (void);
3685 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3686 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3687 static void add_child_die (dw_die_ref, dw_die_ref);
3688 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3689 static dw_die_ref lookup_type_die (tree);
3690 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3691 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3692 static void equate_type_number_to_die (tree, dw_die_ref);
3693 static dw_die_ref lookup_decl_die (tree);
3694 static var_loc_list *lookup_decl_loc (const_tree);
3695 static void equate_decl_number_to_die (tree, dw_die_ref);
3696 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3697 static void print_spaces (FILE *);
3698 static void print_die (dw_die_ref, FILE *);
3699 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3700 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3701 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3702 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3703 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3704 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3705 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3706 struct md5_ctx *, int *);
3707 struct checksum_attributes;
3708 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3709 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3710 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3711 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3712 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3713 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3714 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3715 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3716 static int is_type_die (dw_die_ref);
3717 static int is_comdat_die (dw_die_ref);
3718 static inline bool is_template_instantiation (dw_die_ref);
3719 static int is_declaration_die (dw_die_ref);
3720 static int should_move_die_to_comdat (dw_die_ref);
3721 static dw_die_ref clone_as_declaration (dw_die_ref);
3722 static dw_die_ref clone_die (dw_die_ref);
3723 static dw_die_ref clone_tree (dw_die_ref);
3724 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3725 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3726 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3727 static dw_die_ref generate_skeleton (dw_die_ref);
3728 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3729 dw_die_ref,
3730 dw_die_ref);
3731 static void break_out_comdat_types (dw_die_ref);
3732 static void copy_decls_for_unworthy_types (dw_die_ref);
3733
3734 static void add_sibling_attributes (dw_die_ref);
3735 static void output_location_lists (dw_die_ref);
3736 static int constant_size (unsigned HOST_WIDE_INT);
3737 static unsigned long size_of_die (dw_die_ref);
3738 static void calc_die_sizes (dw_die_ref);
3739 static void calc_base_type_die_sizes (void);
3740 static void mark_dies (dw_die_ref);
3741 static void unmark_dies (dw_die_ref);
3742 static void unmark_all_dies (dw_die_ref);
3743 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3744 static unsigned long size_of_aranges (void);
3745 static enum dwarf_form value_format (dw_attr_node *);
3746 static void output_value_format (dw_attr_node *);
3747 static void output_abbrev_section (void);
3748 static void output_die_abbrevs (unsigned long, dw_die_ref);
3749 static void output_die (dw_die_ref);
3750 static void output_compilation_unit_header (enum dwarf_unit_type);
3751 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3752 static void output_comdat_type_unit (comdat_type_node *);
3753 static const char *dwarf2_name (tree, int);
3754 static void add_pubname (tree, dw_die_ref);
3755 static void add_enumerator_pubname (const char *, dw_die_ref);
3756 static void add_pubname_string (const char *, dw_die_ref);
3757 static void add_pubtype (tree, dw_die_ref);
3758 static void output_pubnames (vec<pubname_entry, va_gc> *);
3759 static void output_aranges (void);
3760 static unsigned int add_ranges (const_tree, bool = false);
3761 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3762 bool *, bool);
3763 static void output_ranges (void);
3764 static dw_line_info_table *new_line_info_table (void);
3765 static void output_line_info (bool);
3766 static void output_file_names (void);
3767 static dw_die_ref base_type_die (tree, bool);
3768 static int is_base_type (tree);
3769 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3770 static int decl_quals (const_tree);
3771 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3772 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3773 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3774 static int type_is_enum (const_tree);
3775 static unsigned int dbx_reg_number (const_rtx);
3776 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3777 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3778 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3779 enum var_init_status);
3780 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3781 enum var_init_status);
3782 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3783 enum var_init_status);
3784 static int is_based_loc (const_rtx);
3785 static bool resolve_one_addr (rtx *);
3786 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3787 enum var_init_status);
3788 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3789 enum var_init_status);
3790 struct loc_descr_context;
3791 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3792 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3793 static dw_loc_list_ref loc_list_from_tree (tree, int,
3794 struct loc_descr_context *);
3795 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3796 struct loc_descr_context *);
3797 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3798 static tree field_type (const_tree);
3799 static unsigned int simple_type_align_in_bits (const_tree);
3800 static unsigned int simple_decl_align_in_bits (const_tree);
3801 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3802 struct vlr_context;
3803 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3804 HOST_WIDE_INT *);
3805 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3806 dw_loc_list_ref);
3807 static void add_data_member_location_attribute (dw_die_ref, tree,
3808 struct vlr_context *);
3809 static bool add_const_value_attribute (dw_die_ref, rtx);
3810 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3811 static void insert_wide_int (const wide_int &, unsigned char *, int);
3812 static void insert_float (const_rtx, unsigned char *);
3813 static rtx rtl_for_decl_location (tree);
3814 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3815 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3816 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3817 static void add_name_attribute (dw_die_ref, const char *);
3818 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3819 static void add_comp_dir_attribute (dw_die_ref);
3820 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3821 struct loc_descr_context *);
3822 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3823 struct loc_descr_context *);
3824 static void add_subscript_info (dw_die_ref, tree, bool);
3825 static void add_byte_size_attribute (dw_die_ref, tree);
3826 static void add_alignment_attribute (dw_die_ref, tree);
3827 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3828 struct vlr_context *);
3829 static void add_bit_size_attribute (dw_die_ref, tree);
3830 static void add_prototyped_attribute (dw_die_ref, tree);
3831 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3832 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3833 static void add_src_coords_attributes (dw_die_ref, tree);
3834 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3835 static void add_discr_value (dw_die_ref, dw_discr_value *);
3836 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3837 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3838 static void push_decl_scope (tree);
3839 static void pop_decl_scope (void);
3840 static dw_die_ref scope_die_for (tree, dw_die_ref);
3841 static inline int local_scope_p (dw_die_ref);
3842 static inline int class_scope_p (dw_die_ref);
3843 static inline int class_or_namespace_scope_p (dw_die_ref);
3844 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3845 static void add_calling_convention_attribute (dw_die_ref, tree);
3846 static const char *type_tag (const_tree);
3847 static tree member_declared_type (const_tree);
3848 #if 0
3849 static const char *decl_start_label (tree);
3850 #endif
3851 static void gen_array_type_die (tree, dw_die_ref);
3852 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3853 #if 0
3854 static void gen_entry_point_die (tree, dw_die_ref);
3855 #endif
3856 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3857 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3858 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3859 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3860 static void gen_formal_types_die (tree, dw_die_ref);
3861 static void gen_subprogram_die (tree, dw_die_ref);
3862 static void gen_variable_die (tree, tree, dw_die_ref);
3863 static void gen_const_die (tree, dw_die_ref);
3864 static void gen_label_die (tree, dw_die_ref);
3865 static void gen_lexical_block_die (tree, dw_die_ref);
3866 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3867 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3868 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3869 static dw_die_ref gen_compile_unit_die (const char *);
3870 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3871 static void gen_member_die (tree, dw_die_ref);
3872 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3873 enum debug_info_usage);
3874 static void gen_subroutine_type_die (tree, dw_die_ref);
3875 static void gen_typedef_die (tree, dw_die_ref);
3876 static void gen_type_die (tree, dw_die_ref);
3877 static void gen_block_die (tree, dw_die_ref);
3878 static void decls_for_scope (tree, dw_die_ref);
3879 static bool is_naming_typedef_decl (const_tree);
3880 static inline dw_die_ref get_context_die (tree);
3881 static void gen_namespace_die (tree, dw_die_ref);
3882 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3883 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3884 static dw_die_ref force_decl_die (tree);
3885 static dw_die_ref force_type_die (tree);
3886 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3887 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3888 static struct dwarf_file_data * lookup_filename (const char *);
3889 static void retry_incomplete_types (void);
3890 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3891 static void gen_generic_params_dies (tree);
3892 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3893 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3894 static void splice_child_die (dw_die_ref, dw_die_ref);
3895 static int file_info_cmp (const void *, const void *);
3896 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3897 const char *, var_loc_view, const char *);
3898 static void output_loc_list (dw_loc_list_ref);
3899 static char *gen_internal_sym (const char *);
3900 static bool want_pubnames (void);
3901
3902 static void prune_unmark_dies (dw_die_ref);
3903 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3904 static void prune_unused_types_mark (dw_die_ref, int);
3905 static void prune_unused_types_walk (dw_die_ref);
3906 static void prune_unused_types_walk_attribs (dw_die_ref);
3907 static void prune_unused_types_prune (dw_die_ref);
3908 static void prune_unused_types (void);
3909 static int maybe_emit_file (struct dwarf_file_data *fd);
3910 static inline const char *AT_vms_delta1 (dw_attr_node *);
3911 static inline const char *AT_vms_delta2 (dw_attr_node *);
3912 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3913 const char *, const char *);
3914 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3915 static void gen_remaining_tmpl_value_param_die_attribute (void);
3916 static bool generic_type_p (tree);
3917 static void schedule_generic_params_dies_gen (tree t);
3918 static void gen_scheduled_generic_parms_dies (void);
3919 static void resolve_variable_values (void);
3920
3921 static const char *comp_dir_string (void);
3922
3923 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3924
3925 /* enum for tracking thread-local variables whose address is really an offset
3926 relative to the TLS pointer, which will need link-time relocation, but will
3927 not need relocation by the DWARF consumer. */
3928
3929 enum dtprel_bool
3930 {
3931 dtprel_false = 0,
3932 dtprel_true = 1
3933 };
3934
3935 /* Return the operator to use for an address of a variable. For dtprel_true, we
3936 use DW_OP_const*. For regular variables, which need both link-time
3937 relocation and consumer-level relocation (e.g., to account for shared objects
3938 loaded at a random address), we use DW_OP_addr*. */
3939
3940 static inline enum dwarf_location_atom
3941 dw_addr_op (enum dtprel_bool dtprel)
3942 {
3943 if (dtprel == dtprel_true)
3944 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3945 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3946 else
3947 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3948 }
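/* As an illustration: on a target where DWARF2_ADDR_SIZE is 8 and split
   debug info is disabled, dw_addr_op (dtprel_true) yields DW_OP_const8u and
   dw_addr_op (dtprel_false) yields DW_OP_addr; under -gsplit-dwarf the
   indexed operators returned by dwarf_OP (DW_OP_constx / DW_OP_addrx, or
   their GNU predecessors for pre-DWARF-5 output) are used instead.  */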
3949
3950 /* Return a pointer to a newly allocated address location description. If
3951 dwarf_split_debug_info is true, then record the address with the appropriate
3952 relocation. */
3953 static inline dw_loc_descr_ref
3954 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3955 {
3956 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3957
3958 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3959 ref->dw_loc_oprnd1.v.val_addr = addr;
3960 ref->dtprel = dtprel;
3961 if (dwarf_split_debug_info)
3962 ref->dw_loc_oprnd1.val_entry
3963 = add_addr_table_entry (addr,
3964 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3965 else
3966 ref->dw_loc_oprnd1.val_entry = NULL;
3967
3968 return ref;
3969 }
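/* A sketch of typical use, with SYM standing in for some symbolic address rtx:

     dw_loc_descr_ref ref = new_addr_loc_descr (SYM, dtprel_false);

   This builds a single DW_OP_addr (or DW_OP_addrx under -gsplit-dwarf)
   whose operand is SYM; with split debug info the address is also entered
   into the .debug_addr table via add_addr_table_entry.  */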
3970
3971 /* Section names used to hold DWARF debugging information. */
3972
3973 #ifndef DEBUG_INFO_SECTION
3974 #define DEBUG_INFO_SECTION ".debug_info"
3975 #endif
3976 #ifndef DEBUG_DWO_INFO_SECTION
3977 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3978 #endif
3979 #ifndef DEBUG_LTO_INFO_SECTION
3980 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3981 #endif
3982 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3983 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3984 #endif
3985 #ifndef DEBUG_ABBREV_SECTION
3986 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3987 #endif
3988 #ifndef DEBUG_LTO_ABBREV_SECTION
3989 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3990 #endif
3991 #ifndef DEBUG_DWO_ABBREV_SECTION
3992 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3993 #endif
3994 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3995 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3996 #endif
3997 #ifndef DEBUG_ARANGES_SECTION
3998 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3999 #endif
4000 #ifndef DEBUG_ADDR_SECTION
4001 #define DEBUG_ADDR_SECTION ".debug_addr"
4002 #endif
4003 #ifndef DEBUG_MACINFO_SECTION
4004 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
4005 #endif
4006 #ifndef DEBUG_LTO_MACINFO_SECTION
4007 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4008 #endif
4009 #ifndef DEBUG_DWO_MACINFO_SECTION
4010 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4011 #endif
4012 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4013 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4014 #endif
4015 #ifndef DEBUG_MACRO_SECTION
4016 #define DEBUG_MACRO_SECTION ".debug_macro"
4017 #endif
4018 #ifndef DEBUG_LTO_MACRO_SECTION
4019 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4020 #endif
4021 #ifndef DEBUG_DWO_MACRO_SECTION
4022 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4023 #endif
4024 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4025 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4026 #endif
4027 #ifndef DEBUG_LINE_SECTION
4028 #define DEBUG_LINE_SECTION ".debug_line"
4029 #endif
4030 #ifndef DEBUG_LTO_LINE_SECTION
4031 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4032 #endif
4033 #ifndef DEBUG_DWO_LINE_SECTION
4034 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4035 #endif
4036 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4037 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4038 #endif
4039 #ifndef DEBUG_LOC_SECTION
4040 #define DEBUG_LOC_SECTION ".debug_loc"
4041 #endif
4042 #ifndef DEBUG_DWO_LOC_SECTION
4043 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4044 #endif
4045 #ifndef DEBUG_LOCLISTS_SECTION
4046 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4047 #endif
4048 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4049 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4050 #endif
4051 #ifndef DEBUG_PUBNAMES_SECTION
4052 #define DEBUG_PUBNAMES_SECTION \
4053 ((debug_generate_pub_sections == 2) \
4054 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4055 #endif
4056 #ifndef DEBUG_PUBTYPES_SECTION
4057 #define DEBUG_PUBTYPES_SECTION \
4058 ((debug_generate_pub_sections == 2) \
4059 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4060 #endif
4061 #ifndef DEBUG_STR_OFFSETS_SECTION
4062 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4063 #endif
4064 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4065 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4066 #endif
4067 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4068 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4069 #endif
4070 #ifndef DEBUG_STR_SECTION
4071 #define DEBUG_STR_SECTION ".debug_str"
4072 #endif
4073 #ifndef DEBUG_LTO_STR_SECTION
4074 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4075 #endif
4076 #ifndef DEBUG_STR_DWO_SECTION
4077 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4078 #endif
4079 #ifndef DEBUG_LTO_STR_DWO_SECTION
4080 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4081 #endif
4082 #ifndef DEBUG_RANGES_SECTION
4083 #define DEBUG_RANGES_SECTION ".debug_ranges"
4084 #endif
4085 #ifndef DEBUG_RNGLISTS_SECTION
4086 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4087 #endif
4088 #ifndef DEBUG_LINE_STR_SECTION
4089 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4090 #endif
4091 #ifndef DEBUG_LTO_LINE_STR_SECTION
4092 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4093 #endif
4094
4095 /* Standard ELF section names for compiled code and data. */
4096 #ifndef TEXT_SECTION_NAME
4097 #define TEXT_SECTION_NAME ".text"
4098 #endif
4099
4100 /* Section flags for .debug_str section. */
4101 #define DEBUG_STR_SECTION_FLAGS \
4102 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4103 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4104 : SECTION_DEBUG)
4105
4106 /* Section flags for .debug_str.dwo section. */
4107 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4108
4109 /* Attribute used to refer to the macro section. */
4110 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4111 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
4112
4113 /* Labels we insert at the beginning of sections so that we can reference
4114 them instead of the section names themselves. */
4115
4116 #ifndef TEXT_SECTION_LABEL
4117 #define TEXT_SECTION_LABEL "Ltext"
4118 #endif
4119 #ifndef COLD_TEXT_SECTION_LABEL
4120 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4121 #endif
4122 #ifndef DEBUG_LINE_SECTION_LABEL
4123 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4124 #endif
4125 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4126 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4127 #endif
4128 #ifndef DEBUG_INFO_SECTION_LABEL
4129 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4130 #endif
4131 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4132 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4133 #endif
4134 #ifndef DEBUG_ABBREV_SECTION_LABEL
4135 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4136 #endif
4137 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4138 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4139 #endif
4140 #ifndef DEBUG_ADDR_SECTION_LABEL
4141 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4142 #endif
4143 #ifndef DEBUG_LOC_SECTION_LABEL
4144 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4145 #endif
4146 #ifndef DEBUG_RANGES_SECTION_LABEL
4147 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4148 #endif
4149 #ifndef DEBUG_MACINFO_SECTION_LABEL
4150 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4151 #endif
4152 #ifndef DEBUG_MACRO_SECTION_LABEL
4153 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4154 #endif
4155 #define SKELETON_COMP_DIE_ABBREV 1
4156 #define SKELETON_TYPE_DIE_ABBREV 2
4157
4158 /* Definitions of defaults for formats and names of various special
4159 (artificial) labels which may be generated within this file (when the -g
4160 option is used and DWARF2_DEBUGGING_INFO is in effect).
4161 If necessary, these may be overridden from within the tm.h file, but
4162 typically, overriding these defaults is unnecessary. */
4163
4164 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4170 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4171 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4172 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4173 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4174 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4175 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4176 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4177 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4178 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4179
4180 #ifndef TEXT_END_LABEL
4181 #define TEXT_END_LABEL "Letext"
4182 #endif
4183 #ifndef COLD_END_LABEL
4184 #define COLD_END_LABEL "Letext_cold"
4185 #endif
4186 #ifndef BLOCK_BEGIN_LABEL
4187 #define BLOCK_BEGIN_LABEL "LBB"
4188 #endif
4189 #ifndef BLOCK_INLINE_ENTRY_LABEL
4190 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4191 #endif
4192 #ifndef BLOCK_END_LABEL
4193 #define BLOCK_END_LABEL "LBE"
4194 #endif
4195 #ifndef LINE_CODE_LABEL
4196 #define LINE_CODE_LABEL "LM"
4197 #endif
4198
4199 \f
4200 /* Return the root of the DIEs built for the current compilation unit. */
4201 static dw_die_ref
4202 comp_unit_die (void)
4203 {
4204 if (!single_comp_unit_die)
4205 single_comp_unit_die = gen_compile_unit_die (NULL);
4206 return single_comp_unit_die;
4207 }
4208
4209 /* We allow a language front-end to designate a function that is to be
4210 called to "demangle" any name before it is put into a DIE. */
4211
4212 static const char *(*demangle_name_func) (const char *);
4213
4214 void
4215 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4216 {
4217 demangle_name_func = func;
4218 }
4219
4220 /* Test if rtl node points to a pseudo register. */
4221
4222 static inline int
4223 is_pseudo_reg (const_rtx rtl)
4224 {
4225 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4226 || (GET_CODE (rtl) == SUBREG
4227 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4228 }
4229
4230 /* Return a reference to a type, with its const and volatile qualifiers
4231 removed. */
4232
4233 static inline tree
4234 type_main_variant (tree type)
4235 {
4236 type = TYPE_MAIN_VARIANT (type);
4237
4238 /* ??? There really should be only one main variant among any group of
4239 variants of a given type (and all of the MAIN_VARIANT values for all
4240 members of the group should point to that one type) but sometimes the C
4241 front-end messes this up for array types, so we work around that bug
4242 here. */
4243 if (TREE_CODE (type) == ARRAY_TYPE)
4244 while (type != TYPE_MAIN_VARIANT (type))
4245 type = TYPE_MAIN_VARIANT (type);
4246
4247 return type;
4248 }
4249
4250 /* Return nonzero if the given type node represents a tagged type. */
4251
4252 static inline int
4253 is_tagged_type (const_tree type)
4254 {
4255 enum tree_code code = TREE_CODE (type);
4256
4257 return (code == RECORD_TYPE || code == UNION_TYPE
4258 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4259 }
4260
4261 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4262
4263 static void
4264 get_ref_die_offset_label (char *label, dw_die_ref ref)
4265 {
4266 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4267 }
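/* For example, a DIE at offset 42 in the current unit would typically yield
   a label expression such as "Ldebug_info0+42", the exact prefix depending
   on how debug_info_section_label was generated for the target.  */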
4268
4269 /* Return die_offset of a DIE reference to a base type. */
4270
4271 static unsigned long int
4272 get_base_type_offset (dw_die_ref ref)
4273 {
4274 if (ref->die_offset)
4275 return ref->die_offset;
4276 if (comp_unit_die ()->die_abbrev)
4277 {
4278 calc_base_type_die_sizes ();
4279 gcc_assert (ref->die_offset);
4280 }
4281 return ref->die_offset;
4282 }
4283
4284 /* Return die_offset of a DIE reference other than base type. */
4285
4286 static unsigned long int
4287 get_ref_die_offset (dw_die_ref ref)
4288 {
4289 gcc_assert (ref->die_offset);
4290 return ref->die_offset;
4291 }
4292
4293 /* Convert a DIE tag into its string name. */
4294
4295 static const char *
4296 dwarf_tag_name (unsigned int tag)
4297 {
4298 const char *name = get_DW_TAG_name (tag);
4299
4300 if (name != NULL)
4301 return name;
4302
4303 return "DW_TAG_<unknown>";
4304 }
4305
4306 /* Convert a DWARF attribute code into its string name. */
4307
4308 static const char *
4309 dwarf_attr_name (unsigned int attr)
4310 {
4311 const char *name;
4312
4313 switch (attr)
4314 {
4315 #if VMS_DEBUGGING_INFO
4316 case DW_AT_HP_prologue:
4317 return "DW_AT_HP_prologue";
4318 #else
4319 case DW_AT_MIPS_loop_unroll_factor:
4320 return "DW_AT_MIPS_loop_unroll_factor";
4321 #endif
4322
4323 #if VMS_DEBUGGING_INFO
4324 case DW_AT_HP_epilogue:
4325 return "DW_AT_HP_epilogue";
4326 #else
4327 case DW_AT_MIPS_stride:
4328 return "DW_AT_MIPS_stride";
4329 #endif
4330 }
4331
4332 name = get_DW_AT_name (attr);
4333
4334 if (name != NULL)
4335 return name;
4336
4337 return "DW_AT_<unknown>";
4338 }
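/* The special cases above exist because some vendor extension attribute
   codes share the same numeric value (the HP/VMS and MIPS ranges overlap),
   so the ambiguity is resolved by the VMS_DEBUGGING_INFO configuration
   before falling back to the generic get_DW_AT_name table.  */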
4339
4340 /* Convert a DWARF value form code into its string name. */
4341
4342 static const char *
4343 dwarf_form_name (unsigned int form)
4344 {
4345 const char *name = get_DW_FORM_name (form);
4346
4347 if (name != NULL)
4348 return name;
4349
4350 return "DW_FORM_<unknown>";
4351 }
4352 \f
4353 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4354 instance of an inlined instance of a decl which is local to an inline
4355 function, so we have to trace all of the way back through the origin chain
4356 to find out what sort of node actually served as the original seed for the
4357 given block. */
4358
4359 static tree
4360 decl_ultimate_origin (const_tree decl)
4361 {
4362 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4363 return NULL_TREE;
4364
4365 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4366 we're trying to output the abstract instance of this function. */
4367 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4368 return NULL_TREE;
4369
4370 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4371 most distant ancestor, this should never happen. */
4372 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4373
4374 return DECL_ABSTRACT_ORIGIN (decl);
4375 }
4376
4377 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4378 of a virtual function may refer to a base class, so we check the 'this'
4379 parameter. */
4380
4381 static tree
4382 decl_class_context (tree decl)
4383 {
4384 tree context = NULL_TREE;
4385
4386 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4387 context = DECL_CONTEXT (decl);
4388 else
4389 context = TYPE_MAIN_VARIANT
4390 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4391
4392 if (context && !TYPE_P (context))
4393 context = NULL_TREE;
4394
4395 return context;
4396 }
4397 \f
4398 /* Add an attribute/value pair to a DIE. */
4399
4400 static inline void
4401 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4402 {
4403 /* Maybe this should be an assert? */
4404 if (die == NULL)
4405 return;
4406
4407 if (flag_checking)
4408 {
4409 /* Check we do not add duplicate attrs. Can't use get_AT here
4410 because that recurses to the specification/abstract origin DIE. */
4411 dw_attr_node *a;
4412 unsigned ix;
4413 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4414 gcc_assert (a->dw_attr != attr->dw_attr);
4415 }
4416
4417 vec_safe_reserve (die->die_attr, 1);
4418 vec_safe_push (die->die_attr, *attr);
4419 }
4420
4421 static inline enum dw_val_class
4422 AT_class (dw_attr_node *a)
4423 {
4424 return a->dw_attr_val.val_class;
4425 }
4426
4427 /* Return the index for any attribute that will be referenced with a
4428 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4429 indices are stored in dw_attr_val.v.val_str so that they can be
4430 reference counted for pruning. */
4431
4432 static inline unsigned int
4433 AT_index (dw_attr_node *a)
4434 {
4435 if (AT_class (a) == dw_val_class_str)
4436 return a->dw_attr_val.v.val_str->index;
4437 else if (a->dw_attr_val.val_entry != NULL)
4438 return a->dw_attr_val.val_entry->index;
4439 return NOT_INDEXED;
4440 }
4441
4442 /* Add a flag value attribute to a DIE. */
4443
4444 static inline void
4445 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4446 {
4447 dw_attr_node attr;
4448
4449 attr.dw_attr = attr_kind;
4450 attr.dw_attr_val.val_class = dw_val_class_flag;
4451 attr.dw_attr_val.val_entry = NULL;
4452 attr.dw_attr_val.v.val_flag = flag;
4453 add_dwarf_attr (die, &attr);
4454 }
4455
4456 static inline unsigned
4457 AT_flag (dw_attr_node *a)
4458 {
4459 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4460 return a->dw_attr_val.v.val_flag;
4461 }
4462
4463 /* Add a signed integer attribute value to a DIE. */
4464
4465 static inline void
4466 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4467 {
4468 dw_attr_node attr;
4469
4470 attr.dw_attr = attr_kind;
4471 attr.dw_attr_val.val_class = dw_val_class_const;
4472 attr.dw_attr_val.val_entry = NULL;
4473 attr.dw_attr_val.v.val_int = int_val;
4474 add_dwarf_attr (die, &attr);
4475 }
4476
4477 static inline HOST_WIDE_INT
4478 AT_int (dw_attr_node *a)
4479 {
4480 gcc_assert (a && (AT_class (a) == dw_val_class_const
4481 || AT_class (a) == dw_val_class_const_implicit));
4482 return a->dw_attr_val.v.val_int;
4483 }
4484
4485 /* Add an unsigned integer attribute value to a DIE. */
4486
4487 static inline void
4488 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4489 unsigned HOST_WIDE_INT unsigned_val)
4490 {
4491 dw_attr_node attr;
4492
4493 attr.dw_attr = attr_kind;
4494 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4495 attr.dw_attr_val.val_entry = NULL;
4496 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4497 add_dwarf_attr (die, &attr);
4498 }
4499
4500 static inline unsigned HOST_WIDE_INT
4501 AT_unsigned (dw_attr_node *a)
4502 {
4503 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4504 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4505 return a->dw_attr_val.v.val_unsigned;
4506 }
4507
4508 /* Add an unsigned wide integer attribute value to a DIE. */
4509
4510 static inline void
4511 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4512 const wide_int& w)
4513 {
4514 dw_attr_node attr;
4515
4516 attr.dw_attr = attr_kind;
4517 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4518 attr.dw_attr_val.val_entry = NULL;
4519 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4520 *attr.dw_attr_val.v.val_wide = w;
4521 add_dwarf_attr (die, &attr);
4522 }
4523
4524 /* Add an unsigned double integer attribute value to a DIE. */
4525
4526 static inline void
4527 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4528 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4529 {
4530 dw_attr_node attr;
4531
4532 attr.dw_attr = attr_kind;
4533 attr.dw_attr_val.val_class = dw_val_class_const_double;
4534 attr.dw_attr_val.val_entry = NULL;
4535 attr.dw_attr_val.v.val_double.high = high;
4536 attr.dw_attr_val.v.val_double.low = low;
4537 add_dwarf_attr (die, &attr);
4538 }
4539
4540 /* Add a byte-vector attribute value (e.g. a floating point constant) to a DIE. */
4541
4542 static inline void
4543 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4544 unsigned int length, unsigned int elt_size, unsigned char *array)
4545 {
4546 dw_attr_node attr;
4547
4548 attr.dw_attr = attr_kind;
4549 attr.dw_attr_val.val_class = dw_val_class_vec;
4550 attr.dw_attr_val.val_entry = NULL;
4551 attr.dw_attr_val.v.val_vec.length = length;
4552 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4553 attr.dw_attr_val.v.val_vec.array = array;
4554 add_dwarf_attr (die, &attr);
4555 }
4556
4557 /* Add an 8-byte data attribute value to a DIE. */
4558
4559 static inline void
4560 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4561 unsigned char data8[8])
4562 {
4563 dw_attr_node attr;
4564
4565 attr.dw_attr = attr_kind;
4566 attr.dw_attr_val.val_class = dw_val_class_data8;
4567 attr.dw_attr_val.val_entry = NULL;
4568 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4569 add_dwarf_attr (die, &attr);
4570 }
4571
4572 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4573 dwarf_split_debug_info, address attributes in dies destined for the
4574 final executable have force_direct set to avoid using indexed
4575 references. */
4576
4577 static inline void
4578 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4579 bool force_direct)
4580 {
4581 dw_attr_node attr;
4582 char * lbl_id;
4583
4584 lbl_id = xstrdup (lbl_low);
4585 attr.dw_attr = DW_AT_low_pc;
4586 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4587 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4588 if (dwarf_split_debug_info && !force_direct)
4589 attr.dw_attr_val.val_entry
4590 = add_addr_table_entry (lbl_id, ate_kind_label);
4591 else
4592 attr.dw_attr_val.val_entry = NULL;
4593 add_dwarf_attr (die, &attr);
4594
4595 attr.dw_attr = DW_AT_high_pc;
4596 if (dwarf_version < 4)
4597 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4598 else
4599 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4600 lbl_id = xstrdup (lbl_high);
4601 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4602 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4603 && dwarf_split_debug_info && !force_direct)
4604 attr.dw_attr_val.val_entry
4605 = add_addr_table_entry (lbl_id, ate_kind_label);
4606 else
4607 attr.dw_attr_val.val_entry = NULL;
4608 add_dwarf_attr (die, &attr);
4609 }
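/* Note that for DWARF 4 and later the DW_AT_high_pc attribute added above
   uses dw_val_class_high_pc and is eventually emitted as an offset from
   DW_AT_low_pc rather than as an absolute address, which is also why it is
   never routed through the .debug_addr index table.  */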
4610
4611 /* Hash and equality functions for debug_str_hash. */
4612
4613 hashval_t
4614 indirect_string_hasher::hash (indirect_string_node *x)
4615 {
4616 return htab_hash_string (x->str);
4617 }
4618
4619 bool
4620 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4621 {
4622 return strcmp (x1->str, x2) == 0;
4623 }
4624
4625 /* Add STR to the given string hash table. */
4626
4627 static struct indirect_string_node *
4628 find_AT_string_in_table (const char *str,
4629 hash_table<indirect_string_hasher> *table)
4630 {
4631 struct indirect_string_node *node;
4632
4633 indirect_string_node **slot
4634 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4635 if (*slot == NULL)
4636 {
4637 node = ggc_cleared_alloc<indirect_string_node> ();
4638 node->str = ggc_strdup (str);
4639 *slot = node;
4640 }
4641 else
4642 node = *slot;
4643
4644 node->refcount++;
4645 return node;
4646 }
4647
4648 /* Add STR to the indirect string hash table. */
4649
4650 static struct indirect_string_node *
4651 find_AT_string (const char *str)
4652 {
4653 if (! debug_str_hash)
4654 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4655
4656 return find_AT_string_in_table (str, debug_str_hash);
4657 }
4658
4659 /* Add a string attribute value to a DIE. */
4660
4661 static inline void
4662 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4663 {
4664 dw_attr_node attr;
4665 struct indirect_string_node *node;
4666
4667 node = find_AT_string (str);
4668
4669 attr.dw_attr = attr_kind;
4670 attr.dw_attr_val.val_class = dw_val_class_str;
4671 attr.dw_attr_val.val_entry = NULL;
4672 attr.dw_attr_val.v.val_str = node;
4673 add_dwarf_attr (die, &attr);
4674 }
4675
4676 static inline const char *
4677 AT_string (dw_attr_node *a)
4678 {
4679 gcc_assert (a && AT_class (a) == dw_val_class_str);
4680 return a->dw_attr_val.v.val_str->str;
4681 }
4682
4683 /* Call this function directly to force a string out of line, bypassing
4684 AT_string_form's logic for putting short strings inline in the DIE. */
4685
4686 static void
4687 set_indirect_string (struct indirect_string_node *node)
4688 {
4689 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4690 /* If the string is already indirect, this is a no-op. */
4691 if (node->form == DW_FORM_strp
4692 || node->form == DW_FORM_line_strp
4693 || node->form == dwarf_FORM (DW_FORM_strx))
4694 {
4695 gcc_assert (node->label);
4696 return;
4697 }
4698 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4699 ++dw2_string_counter;
4700 node->label = xstrdup (label);
4701
4702 if (!dwarf_split_debug_info)
4703 {
4704 node->form = DW_FORM_strp;
4705 node->index = NOT_INDEXED;
4706 }
4707 else
4708 {
4709 node->form = dwarf_FORM (DW_FORM_strx);
4710 node->index = NO_INDEX_ASSIGNED;
4711 }
4712 }
4713
4714 /* A helper function for dwarf2out_finish, called to reset indirect
4715 string decisions done for early LTO dwarf output before fat object
4716 dwarf output. */
4717
4718 int
4719 reset_indirect_string (indirect_string_node **h, void *)
4720 {
4721 struct indirect_string_node *node = *h;
4722 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4723 {
4724 free (node->label);
4725 node->label = NULL;
4726 node->form = (dwarf_form) 0;
4727 node->index = 0;
4728 }
4729 return 1;
4730 }
4731
4732 /* Find out whether a string should be output inline in DIE
4733 or out-of-line in .debug_str section. */
4734
4735 static enum dwarf_form
4736 find_string_form (struct indirect_string_node *node)
4737 {
4738 unsigned int len;
4739
4740 if (node->form)
4741 return node->form;
4742
4743 len = strlen (node->str) + 1;
4744
4745 /* If the string is shorter than or equal to the size of the reference, it is
4746 always better to put it inline. */
4747 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4748 return node->form = DW_FORM_string;
4749
4750 /* If we cannot expect the linker to merge strings in .debug_str
4751 section, only put it into .debug_str if it is worthwhile even in this
4752 single module. */
4753 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4754 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4755 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4756 return node->form = DW_FORM_string;
4757
4758 set_indirect_string (node);
4759
4760 return node->form;
4761 }
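/* A worked example of the heuristic above, assuming DWARF_OFFSET_SIZE == 4
   and no mergeable .debug_str support: the string "abc" has len == 4, which
   is <= DWARF_OFFSET_SIZE, so it stays inline as DW_FORM_string; a
   20-character string referenced from 3 DIEs has
   (len - DWARF_OFFSET_SIZE) * refcount == (21 - 4) * 3 == 51 > len == 21,
   so set_indirect_string moves it out of line into .debug_str.  */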
4762
4763 /* Find out whether the string referenced from the attribute should be
4764 output inline in DIE or out-of-line in .debug_str section. */
4765
4766 static enum dwarf_form
4767 AT_string_form (dw_attr_node *a)
4768 {
4769 gcc_assert (a && AT_class (a) == dw_val_class_str);
4770 return find_string_form (a->dw_attr_val.v.val_str);
4771 }
4772
4773 /* Add a DIE reference attribute value to a DIE. */
4774
4775 static inline void
4776 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4777 {
4778 dw_attr_node attr;
4779 gcc_checking_assert (targ_die != NULL);
4780
4781 /* With LTO we can end up trying to reference something we didn't create
4782 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4783 if (targ_die == NULL)
4784 return;
4785
4786 attr.dw_attr = attr_kind;
4787 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4788 attr.dw_attr_val.val_entry = NULL;
4789 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4790 attr.dw_attr_val.v.val_die_ref.external = 0;
4791 add_dwarf_attr (die, &attr);
4792 }
4793
4794 /* Change DIE reference REF to point to NEW_DIE instead. */
4795
4796 static inline void
4797 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4798 {
4799 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4800 ref->dw_attr_val.v.val_die_ref.die = new_die;
4801 ref->dw_attr_val.v.val_die_ref.external = 0;
4802 }
4803
4804 /* Add an AT_specification attribute to a DIE, and also make the back
4805 pointer from the specification to the definition. */
4806
4807 static inline void
4808 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4809 {
4810 add_AT_die_ref (die, DW_AT_specification, targ_die);
4811 gcc_assert (!targ_die->die_definition);
4812 targ_die->die_definition = die;
4813 }
4814
4815 static inline dw_die_ref
4816 AT_ref (dw_attr_node *a)
4817 {
4818 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4819 return a->dw_attr_val.v.val_die_ref.die;
4820 }
4821
4822 static inline int
4823 AT_ref_external (dw_attr_node *a)
4824 {
4825 if (a && AT_class (a) == dw_val_class_die_ref)
4826 return a->dw_attr_val.v.val_die_ref.external;
4827
4828 return 0;
4829 }
4830
4831 static inline void
4832 set_AT_ref_external (dw_attr_node *a, int i)
4833 {
4834 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4835 a->dw_attr_val.v.val_die_ref.external = i;
4836 }
4837
4838 /* Add an FDE reference attribute value to a DIE. */
4839
4840 static inline void
4841 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4842 {
4843 dw_attr_node attr;
4844
4845 attr.dw_attr = attr_kind;
4846 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4847 attr.dw_attr_val.val_entry = NULL;
4848 attr.dw_attr_val.v.val_fde_index = targ_fde;
4849 add_dwarf_attr (die, &attr);
4850 }
4851
4852 /* Add a location description attribute value to a DIE. */
4853
4854 static inline void
4855 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4856 {
4857 dw_attr_node attr;
4858
4859 attr.dw_attr = attr_kind;
4860 attr.dw_attr_val.val_class = dw_val_class_loc;
4861 attr.dw_attr_val.val_entry = NULL;
4862 attr.dw_attr_val.v.val_loc = loc;
4863 add_dwarf_attr (die, &attr);
4864 }
4865
4866 static inline dw_loc_descr_ref
4867 AT_loc (dw_attr_node *a)
4868 {
4869 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4870 return a->dw_attr_val.v.val_loc;
4871 }
4872
4873 static inline void
4874 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4875 {
4876 dw_attr_node attr;
4877
4878 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4879 return;
4880
4881 attr.dw_attr = attr_kind;
4882 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4883 attr.dw_attr_val.val_entry = NULL;
4884 attr.dw_attr_val.v.val_loc_list = loc_list;
4885 add_dwarf_attr (die, &attr);
4886 have_location_lists = true;
4887 }
4888
4889 static inline dw_loc_list_ref
4890 AT_loc_list (dw_attr_node *a)
4891 {
4892 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4893 return a->dw_attr_val.v.val_loc_list;
4894 }
4895
4896 /* Add a view list attribute to DIE. It must have a DW_AT_location
4897 attribute, because the view list complements the location list. */
4898
4899 static inline void
4900 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4901 {
4902 dw_attr_node attr;
4903
4904 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4905 return;
4906
4907 attr.dw_attr = attr_kind;
4908 attr.dw_attr_val.val_class = dw_val_class_view_list;
4909 attr.dw_attr_val.val_entry = NULL;
4910 attr.dw_attr_val.v.val_view_list = die;
4911 add_dwarf_attr (die, &attr);
4912 gcc_checking_assert (get_AT (die, DW_AT_location));
4913 gcc_assert (have_location_lists);
4914 }
4915
4916 /* Return a pointer to the location list referenced by the attribute.
4917 If the named attribute is a view list, look up the corresponding
4918 DW_AT_location attribute and return its location list. */
4919
4920 static inline dw_loc_list_ref *
4921 AT_loc_list_ptr (dw_attr_node *a)
4922 {
4923 gcc_assert (a);
4924 switch (AT_class (a))
4925 {
4926 case dw_val_class_loc_list:
4927 return &a->dw_attr_val.v.val_loc_list;
4928 case dw_val_class_view_list:
4929 {
4930 dw_attr_node *l;
4931 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4932 if (!l)
4933 return NULL;
4934 gcc_checking_assert (l + 1 == a);
4935 return AT_loc_list_ptr (l);
4936 }
4937 default:
4938 gcc_unreachable ();
4939 }
4940 }
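/* The l + 1 == a check above relies on the layout produced by
   add_AT_view_list: the view-list attribute is added immediately after the
   DW_AT_location attribute, so the two dw_attr_nodes are adjacent in the
   DIE's attribute vector.  */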
4941
4942 /* Return the location attribute value associated with a view list
4943 attribute value. */
4944
4945 static inline dw_val_node *
4946 view_list_to_loc_list_val_node (dw_val_node *val)
4947 {
4948 gcc_assert (val->val_class == dw_val_class_view_list);
4949 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4950 if (!loc)
4951 return NULL;
4952 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4953 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4954 return &loc->dw_attr_val;
4955 }
4956
4957 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4958 {
4959 static hashval_t hash (addr_table_entry *);
4960 static bool equal (addr_table_entry *, addr_table_entry *);
4961 };
4962
4963 /* Table of entries into the .debug_addr section. */
4964
4965 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4966
4967 /* Hash an address_table_entry. */
4968
4969 hashval_t
4970 addr_hasher::hash (addr_table_entry *a)
4971 {
4972 inchash::hash hstate;
4973 switch (a->kind)
4974 {
4975 case ate_kind_rtx:
4976 hstate.add_int (0);
4977 break;
4978 case ate_kind_rtx_dtprel:
4979 hstate.add_int (1);
4980 break;
4981 case ate_kind_label:
4982 return htab_hash_string (a->addr.label);
4983 default:
4984 gcc_unreachable ();
4985 }
4986 inchash::add_rtx (a->addr.rtl, hstate);
4987 return hstate.end ();
4988 }
4989
4990 /* Determine equality for two address_table_entries. */
4991
4992 bool
4993 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4994 {
4995 if (a1->kind != a2->kind)
4996 return 0;
4997 switch (a1->kind)
4998 {
4999 case ate_kind_rtx:
5000 case ate_kind_rtx_dtprel:
5001 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
5002 case ate_kind_label:
5003 return strcmp (a1->addr.label, a2->addr.label) == 0;
5004 default:
5005 gcc_unreachable ();
5006 }
5007 }
5008
5009 /* Initialize an addr_table_entry. */
5010
5011 void
5012 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
5013 {
5014 e->kind = kind;
5015 switch (kind)
5016 {
5017 case ate_kind_rtx:
5018 case ate_kind_rtx_dtprel:
5019 e->addr.rtl = (rtx) addr;
5020 break;
5021 case ate_kind_label:
5022 e->addr.label = (char *) addr;
5023 break;
5024 }
5025 e->refcount = 0;
5026 e->index = NO_INDEX_ASSIGNED;
5027 }
5028
5029 /* Add (or find) an address table entry for ADDR to the table and bump its
5030 refcount. Defer setting an index until output time. */
5031
5032 static addr_table_entry *
5033 add_addr_table_entry (void *addr, enum ate_kind kind)
5034 {
5035 addr_table_entry *node;
5036 addr_table_entry finder;
5037
5038 gcc_assert (dwarf_split_debug_info);
5039 if (! addr_index_table)
5040 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5041 init_addr_table_entry (&finder, kind, addr);
5042 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5043
5044 if (*slot == HTAB_EMPTY_ENTRY)
5045 {
5046 node = ggc_cleared_alloc<addr_table_entry> ();
5047 init_addr_table_entry (node, kind, addr);
5048 *slot = node;
5049 }
5050 else
5051 node = *slot;
5052
5053 node->refcount++;
5054 return node;
5055 }
5056
5057 /* Remove an entry from the addr table by decrementing its refcount.
5058 Strictly, decrementing the refcount would be enough, but the
5059 assertion that the entry is actually in the table has found
5060 bugs. */
5061
5062 static void
5063 remove_addr_table_entry (addr_table_entry *entry)
5064 {
5065 gcc_assert (dwarf_split_debug_info && addr_index_table);
5066 /* After an index is assigned, the table is frozen. */
5067 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5068 entry->refcount--;
5069 }
5070
5071 /* Given a location list, remove all addresses it refers to from the
5072 address_table. */
5073
5074 static void
5075 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5076 {
5077 for (; descr; descr = descr->dw_loc_next)
5078 if (descr->dw_loc_oprnd1.val_entry != NULL)
5079 {
5080 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5081 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5082 }
5083 }
5084
5085 /* A helper function for dwarf2out_finish called through
5086 htab_traverse. Assign an addr_table_entry its index. All entries
5087 must be collected into the table when this function is called,
5088 because the indexing code relies on htab_traverse to traverse nodes
5089 in the same order for each run. */
5090
5091 int
5092 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5093 {
5094 addr_table_entry *node = *h;
5095
5096 /* Don't index unreferenced nodes. */
5097 if (node->refcount == 0)
5098 return 1;
5099
5100 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5101 node->index = *index;
5102 *index += 1;
5103
5104 return 1;
5105 }
5106
5107 /* Add an address constant attribute value to a DIE. When using
5108 dwarf_split_debug_info, address attributes in dies destined for the
5109 final executable should be direct references--setting the parameter
5110 force_direct ensures this behavior. */
5111
5112 static inline void
5113 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5114 bool force_direct)
5115 {
5116 dw_attr_node attr;
5117
5118 attr.dw_attr = attr_kind;
5119 attr.dw_attr_val.val_class = dw_val_class_addr;
5120 attr.dw_attr_val.v.val_addr = addr;
5121 if (dwarf_split_debug_info && !force_direct)
5122 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5123 else
5124 attr.dw_attr_val.val_entry = NULL;
5125 add_dwarf_attr (die, &attr);
5126 }
5127
5128 /* Get the RTX from an address DIE attribute. */
5129
5130 static inline rtx
5131 AT_addr (dw_attr_node *a)
5132 {
5133 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5134 return a->dw_attr_val.v.val_addr;
5135 }
5136
5137 /* Add a file attribute value to a DIE. */
5138
5139 static inline void
5140 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5141 struct dwarf_file_data *fd)
5142 {
5143 dw_attr_node attr;
5144
5145 attr.dw_attr = attr_kind;
5146 attr.dw_attr_val.val_class = dw_val_class_file;
5147 attr.dw_attr_val.val_entry = NULL;
5148 attr.dw_attr_val.v.val_file = fd;
5149 add_dwarf_attr (die, &attr);
5150 }
5151
5152 /* Get the dwarf_file_data from a file DIE attribute. */
5153
5154 static inline struct dwarf_file_data *
5155 AT_file (dw_attr_node *a)
5156 {
5157 gcc_assert (a && (AT_class (a) == dw_val_class_file
5158 || AT_class (a) == dw_val_class_file_implicit));
5159 return a->dw_attr_val.v.val_file;
5160 }
5161
5162 /* Add a vms delta attribute value to a DIE. */
5163
5164 static inline void
5165 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5166 const char *lbl1, const char *lbl2)
5167 {
5168 dw_attr_node attr;
5169
5170 attr.dw_attr = attr_kind;
5171 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5172 attr.dw_attr_val.val_entry = NULL;
5173 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5174 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5175 add_dwarf_attr (die, &attr);
5176 }
5177
5178 /* Add a symbolic view identifier attribute value to a DIE. */
5179
5180 static inline void
5181 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5182 const char *view_label)
5183 {
5184 dw_attr_node attr;
5185
5186 attr.dw_attr = attr_kind;
5187 attr.dw_attr_val.val_class = dw_val_class_symview;
5188 attr.dw_attr_val.val_entry = NULL;
5189 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5190 add_dwarf_attr (die, &attr);
5191 }
5192
5193 /* Add a label identifier attribute value to a DIE. */
5194
5195 static inline void
5196 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5197 const char *lbl_id)
5198 {
5199 dw_attr_node attr;
5200
5201 attr.dw_attr = attr_kind;
5202 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5203 attr.dw_attr_val.val_entry = NULL;
5204 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5205 if (dwarf_split_debug_info)
5206 attr.dw_attr_val.val_entry
5207 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5208 ate_kind_label);
5209 add_dwarf_attr (die, &attr);
5210 }
5211
5212 /* Add a section offset attribute value to a DIE, an offset into the
5213 debug_line section. */
5214
5215 static inline void
5216 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5217 const char *label)
5218 {
5219 dw_attr_node attr;
5220
5221 attr.dw_attr = attr_kind;
5222 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5223 attr.dw_attr_val.val_entry = NULL;
5224 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5225 add_dwarf_attr (die, &attr);
5226 }
5227
5228 /* Add a section offset attribute value to a DIE, an offset into the
5229 debug_loclists section. */
5230
5231 static inline void
5232 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5233 const char *label)
5234 {
5235 dw_attr_node attr;
5236
5237 attr.dw_attr = attr_kind;
5238 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5239 attr.dw_attr_val.val_entry = NULL;
5240 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5241 add_dwarf_attr (die, &attr);
5242 }
5243
5244 /* Add a section offset attribute value to a DIE, an offset into the
5245 debug_macinfo section. */
5246
5247 static inline void
5248 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5249 const char *label)
5250 {
5251 dw_attr_node attr;
5252
5253 attr.dw_attr = attr_kind;
5254 attr.dw_attr_val.val_class = dw_val_class_macptr;
5255 attr.dw_attr_val.val_entry = NULL;
5256 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5257 add_dwarf_attr (die, &attr);
5258 }
5259
5260 /* Add an offset attribute value to a DIE. */
5261
5262 static inline void
5263 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5264 unsigned HOST_WIDE_INT offset)
5265 {
5266 dw_attr_node attr;
5267
5268 attr.dw_attr = attr_kind;
5269 attr.dw_attr_val.val_class = dw_val_class_offset;
5270 attr.dw_attr_val.val_entry = NULL;
5271 attr.dw_attr_val.v.val_offset = offset;
5272 add_dwarf_attr (die, &attr);
5273 }
5274
5275 /* Add a range_list attribute value to a DIE. When using
5276 dwarf_split_debug_info, address attributes in dies destined for the
5277 final executable should be direct references--setting the parameter
5278 force_direct ensures this behavior. */
5279
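/* Markers stored in the val_entry field of a range_list attribute: whether
   the offset still needs split-debug-info (index) handling at output time,
   or can be emitted directly as a relocated section offset.  This is read
   back in output_range_list_offset.  */
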
5280 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5281 #define RELOCATED_OFFSET (NULL)
5282
5283 static void
5284 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5285 long unsigned int offset, bool force_direct)
5286 {
5287 dw_attr_node attr;
5288
5289 attr.dw_attr = attr_kind;
5290 attr.dw_attr_val.val_class = dw_val_class_range_list;
5291 /* For the range_list attribute, use val_entry to store whether the
5292 offset should follow split-debug-info or normal semantics. This
5293 value is read in output_range_list_offset. */
5294 if (dwarf_split_debug_info && !force_direct)
5295 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5296 else
5297 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5298 attr.dw_attr_val.v.val_offset = offset;
5299 add_dwarf_attr (die, &attr);
5300 }
5301
5302 /* Return the start label of a delta attribute. */
5303
5304 static inline const char *
5305 AT_vms_delta1 (dw_attr_node *a)
5306 {
5307 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5308 return a->dw_attr_val.v.val_vms_delta.lbl1;
5309 }
5310
5311 /* Return the end label of a delta attribute. */
5312
5313 static inline const char *
5314 AT_vms_delta2 (dw_attr_node *a)
5315 {
5316 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5317 return a->dw_attr_val.v.val_vms_delta.lbl2;
5318 }
5319
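/* Return the label of a label-class attribute (label id, lineptr, macptr,
   loclistsptr or high_pc).  */
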
5320 static inline const char *
5321 AT_lbl (dw_attr_node *a)
5322 {
5323 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5324 || AT_class (a) == dw_val_class_lineptr
5325 || AT_class (a) == dw_val_class_macptr
5326 || AT_class (a) == dw_val_class_loclistsptr
5327 || AT_class (a) == dw_val_class_high_pc));
5328 return a->dw_attr_val.v.val_lbl_id;
5329 }
5330
5331 /* Get the attribute of type attr_kind. */
5332
5333 static dw_attr_node *
5334 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5335 {
5336 dw_attr_node *a;
5337 unsigned ix;
5338 dw_die_ref spec = NULL;
5339
5340 if (! die)
5341 return NULL;
5342
5343 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5344 if (a->dw_attr == attr_kind)
5345 return a;
5346 else if (a->dw_attr == DW_AT_specification
5347 || a->dw_attr == DW_AT_abstract_origin)
5348 spec = AT_ref (a);
5349
5350 if (spec)
5351 return get_AT (spec, attr_kind);
5352
5353 return NULL;
5354 }
5355
5356 /* Returns the parent of the declaration of DIE. */
5357
5358 static dw_die_ref
5359 get_die_parent (dw_die_ref die)
5360 {
5361 dw_die_ref t;
5362
5363 if (!die)
5364 return NULL;
5365
5366 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5367 || (t = get_AT_ref (die, DW_AT_specification)))
5368 die = t;
5369
5370 return die->die_parent;
5371 }
5372
5373 /* Return the "low pc" attribute value, typically associated with a subprogram
5374 DIE. Return null if the "low pc" attribute is either not present, or if it
5375 cannot be represented as an assembler label identifier. */
5376
5377 static inline const char *
5378 get_AT_low_pc (dw_die_ref die)
5379 {
5380 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5381
5382 return a ? AT_lbl (a) : NULL;
5383 }
5384
5385 /* Return the "high pc" attribute value, typically associated with a subprogram
5386 DIE. Return null if the "high pc" attribute is either not present, or if it
5387 cannot be represented as an assembler label identifier. */
5388
5389 static inline const char *
5390 get_AT_hi_pc (dw_die_ref die)
5391 {
5392 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5393
5394 return a ? AT_lbl (a) : NULL;
5395 }
5396
5397 /* Return the value of the string attribute designated by ATTR_KIND, or
5398 NULL if it is not present. */
5399
5400 static inline const char *
5401 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5402 {
5403 dw_attr_node *a = get_AT (die, attr_kind);
5404
5405 return a ? AT_string (a) : NULL;
5406 }
5407
5408 /* Return the value of the flag attribute designated by ATTR_KIND, or -1
5409 if it is not present. */
5410
5411 static inline int
5412 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5413 {
5414 dw_attr_node *a = get_AT (die, attr_kind);
5415
5416 return a ? AT_flag (a) : 0;
5417 }
5418
5419 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5420 if it is not present. */
5421
5422 static inline unsigned
5423 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5424 {
5425 dw_attr_node *a = get_AT (die, attr_kind);
5426
5427 return a ? AT_unsigned (a) : 0;
5428 }
5429
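/* Return the value of the DIE-reference attribute designated by ATTR_KIND,
   or NULL if it is not present.  */
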
5430 static inline dw_die_ref
5431 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5432 {
5433 dw_attr_node *a = get_AT (die, attr_kind);
5434
5435 return a ? AT_ref (a) : NULL;
5436 }
5437
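/* Return the dwarf_file_data of the file attribute designated by ATTR_KIND,
   or NULL if it is not present.  */
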
5438 static inline struct dwarf_file_data *
5439 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5440 {
5441 dw_attr_node *a = get_AT (die, attr_kind);
5442
5443 return a ? AT_file (a) : NULL;
5444 }
5445
5446 /* Return TRUE if the language is C++. */
5447
5448 static inline bool
5449 is_cxx (void)
5450 {
5451 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5452
5453 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5454 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5455 }
5456
5457 /* Return TRUE if DECL was created by the C++ frontend. */
5458
5459 static bool
5460 is_cxx (const_tree decl)
5461 {
5462 if (in_lto_p)
5463 {
5464 const_tree context = get_ultimate_context (decl);
5465 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5466 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5467 }
5468 return is_cxx ();
5469 }
5470
5471 /* Return TRUE if the language is Fortran. */
5472
5473 static inline bool
5474 is_fortran (void)
5475 {
5476 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5477
5478 return (lang == DW_LANG_Fortran77
5479 || lang == DW_LANG_Fortran90
5480 || lang == DW_LANG_Fortran95
5481 || lang == DW_LANG_Fortran03
5482 || lang == DW_LANG_Fortran08);
5483 }
5484
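/* Return TRUE if DECL was created by the Fortran frontend.  */
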
5485 static inline bool
5486 is_fortran (const_tree decl)
5487 {
5488 if (in_lto_p)
5489 {
5490 const_tree context = get_ultimate_context (decl);
5491 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5492 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5493 "GNU Fortran", 11) == 0
5494 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5495 "GNU F77") == 0);
5496 }
5497 return is_fortran ();
5498 }
5499
5500 /* Return TRUE if the language is Ada. */
5501
5502 static inline bool
5503 is_ada (void)
5504 {
5505 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5506
5507 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5508 }
5509
5510 /* Remove the specified attribute if present. Return TRUE if removal
5511 was successful. */
5512
5513 static bool
5514 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5515 {
5516 dw_attr_node *a;
5517 unsigned ix;
5518
5519 if (! die)
5520 return false;
5521
5522 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5523 if (a->dw_attr == attr_kind)
5524 {
5525 if (AT_class (a) == dw_val_class_str)
5526 if (a->dw_attr_val.v.val_str->refcount)
5527 a->dw_attr_val.v.val_str->refcount--;
5528
5529 /* vec::ordered_remove should help reduce the number of abbrevs
5530 that are needed. */
5531 die->die_attr->ordered_remove (ix);
5532 return true;
5533 }
5534 return false;
5535 }
5536
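/* The children of a DIE form a circular singly-linked list: DIE->die_child
   points to the last child, each child's die_sib points to the next
   sibling, and the last child's die_sib wraps back to the first child.
   The helpers below maintain that invariant.  */
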
5537 /* Remove CHILD from its parent. PREV must have the property that
5538 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5539
5540 static void
5541 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5542 {
5543 gcc_assert (child->die_parent == prev->die_parent);
5544 gcc_assert (prev->die_sib == child);
5545 if (prev == child)
5546 {
5547 gcc_assert (child->die_parent->die_child == child);
5548 prev = NULL;
5549 }
5550 else
5551 prev->die_sib = child->die_sib;
5552 if (child->die_parent->die_child == child)
5553 child->die_parent->die_child = prev;
5554 child->die_sib = NULL;
5555 }
5556
5557 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5558 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5559
5560 static void
5561 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5562 {
5563 dw_die_ref parent = old_child->die_parent;
5564
5565 gcc_assert (parent == prev->die_parent);
5566 gcc_assert (prev->die_sib == old_child);
5567
5568 new_child->die_parent = parent;
5569 if (prev == old_child)
5570 {
5571 gcc_assert (parent->die_child == old_child);
5572 new_child->die_sib = new_child;
5573 }
5574 else
5575 {
5576 prev->die_sib = new_child;
5577 new_child->die_sib = old_child->die_sib;
5578 }
5579 if (old_child->die_parent->die_child == old_child)
5580 old_child->die_parent->die_child = new_child;
5581 old_child->die_sib = NULL;
5582 }
5583
5584 /* Move all children from OLD_PARENT to NEW_PARENT. */
5585
5586 static void
5587 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5588 {
5589 dw_die_ref c;
5590 new_parent->die_child = old_parent->die_child;
5591 old_parent->die_child = NULL;
5592 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5593 }
5594
5595 /* Remove any child DIEs whose die_tag is TAG.  Do nothing if no child
5596 matches TAG. */
5597
5598 static void
5599 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5600 {
5601 dw_die_ref c;
5602
5603 c = die->die_child;
5604 if (c) do {
5605 dw_die_ref prev = c;
5606 c = c->die_sib;
5607 while (c->die_tag == tag)
5608 {
5609 remove_child_with_prev (c, prev);
5610 c->die_parent = NULL;
5611 /* Might have removed every child. */
5612 if (die->die_child == NULL)
5613 return;
5614 c = prev->die_sib;
5615 }
5616 } while (c != die->die_child);
5617 }
5618
5619 /* Add a CHILD_DIE as the last child of DIE. */
5620
5621 static void
5622 add_child_die (dw_die_ref die, dw_die_ref child_die)
5623 {
5624 /* FIXME this should probably be an assert. */
5625 if (! die || ! child_die)
5626 return;
5627 gcc_assert (die != child_die);
5628
5629 child_die->die_parent = die;
5630 if (die->die_child)
5631 {
5632 child_die->die_sib = die->die_child->die_sib;
5633 die->die_child->die_sib = child_die;
5634 }
5635 else
5636 child_die->die_sib = child_die;
5637 die->die_child = child_die;
5638 }
5639
5640 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5641
5642 static void
5643 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5644 dw_die_ref after_die)
5645 {
5646 gcc_assert (die
5647 && child_die
5648 && after_die
5649 && die->die_child
5650 && die != child_die);
5651
5652 child_die->die_parent = die;
5653 child_die->die_sib = after_die->die_sib;
5654 after_die->die_sib = child_die;
5655 if (die->die_child == after_die)
5656 die->die_child = child_die;
5657 }
5658
5659 /* Unassociate CHILD from its parent, and make its parent be
5660 NEW_PARENT. */
5661
5662 static void
5663 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5664 {
5665 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5666 if (p->die_sib == child)
5667 {
5668 remove_child_with_prev (child, p);
5669 break;
5670 }
5671 add_child_die (new_parent, child);
5672 }
5673
5674 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5675 is the specification, to the end of PARENT's list of children.
5676 This is done by removing and re-adding it. */
5677
5678 static void
5679 splice_child_die (dw_die_ref parent, dw_die_ref child)
5680 {
5681 /* We want the declaration DIE from inside the class, not the
5682 specification DIE at toplevel. */
5683 if (child->die_parent != parent)
5684 {
5685 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5686
5687 if (tmp)
5688 child = tmp;
5689 }
5690
5691 gcc_assert (child->die_parent == parent
5692 || (child->die_parent
5693 == get_AT_ref (parent, DW_AT_specification)));
5694
5695 reparent_child (child, parent);
5696 }
5697
5698 /* Create and return a new die with TAG_VALUE as tag. */
5699
5700 static inline dw_die_ref
5701 new_die_raw (enum dwarf_tag tag_value)
5702 {
5703 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5704 die->die_tag = tag_value;
5705 return die;
5706 }
5707
5708 /* Create and return a new die with a parent of PARENT_DIE. If
5709 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5710 associated tree T must be supplied to determine parenthood
5711 later. */
5712
5713 static inline dw_die_ref
5714 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5715 {
5716 dw_die_ref die = new_die_raw (tag_value);
5717
5718 if (parent_die != NULL)
5719 add_child_die (parent_die, die);
5720 else
5721 {
5722 limbo_die_node *limbo_node;
5723
5724 /* No DIEs created after early dwarf should end up in limbo,
5725 because the limbo list should not persist past LTO
5726 streaming. */
5727 if (tag_value != DW_TAG_compile_unit
5728 /* These are allowed because they're generated while
5729 breaking out COMDAT units late. */
5730 && tag_value != DW_TAG_type_unit
5731 && tag_value != DW_TAG_skeleton_unit
5732 && !early_dwarf
5733 /* Allow nested functions to live in limbo because they will
5734 only temporarily live there, as decls_for_scope will fix
5735 them up. */
5736 && (TREE_CODE (t) != FUNCTION_DECL
5737 || !decl_function_context (t))
5738 /* Same as nested functions above but for types. Types that
5739 are local to a function will be fixed in
5740 decls_for_scope. */
5741 && (!RECORD_OR_UNION_TYPE_P (t)
5742 || !TYPE_CONTEXT (t)
5743 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5744 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5745 especially in the ltrans stage, but once we implement LTO
5746 dwarf streaming, we should remove this exception. */
5747 && !in_lto_p)
5748 {
5749 fprintf (stderr, "symbol ended up in limbo too late:");
5750 debug_generic_stmt (t);
5751 gcc_unreachable ();
5752 }
5753
5754 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5755 limbo_node->die = die;
5756 limbo_node->created_for = t;
5757 limbo_node->next = limbo_die_list;
5758 limbo_die_list = limbo_node;
5759 }
5760
5761 return die;
5762 }
5763
5764 /* Return the DIE associated with the given type specifier. */
5765
5766 static inline dw_die_ref
5767 lookup_type_die (tree type)
5768 {
5769 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5770 if (die && die->removed)
5771 {
5772 TYPE_SYMTAB_DIE (type) = NULL;
5773 return NULL;
5774 }
5775 return die;
5776 }
5777
5778 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5779 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5780 anonymous type instead of the one of the naming typedef. */
5781
5782 static inline dw_die_ref
5783 strip_naming_typedef (tree type, dw_die_ref type_die)
5784 {
5785 if (type
5786 && TREE_CODE (type) == RECORD_TYPE
5787 && type_die
5788 && type_die->die_tag == DW_TAG_typedef
5789 && is_naming_typedef_decl (TYPE_NAME (type)))
5790 type_die = get_AT_ref (type_die, DW_AT_type);
5791 return type_die;
5792 }
5793
5794 /* Like lookup_type_die, but if type is an anonymous type named by a
5795 typedef[1], return the DIE of the anonymous type instead of the one of
5796 the naming typedef. This is because in gen_typedef_die, we did
5797 equate the anonymous struct named by the typedef with the DIE of
5798 the naming typedef. So by default, lookup_type_die on an anonymous
5799 struct yields the DIE of the naming typedef.
5800
5801 [1]: Read the comment of is_naming_typedef_decl to learn about what
5802 a naming typedef is. */
5803
5804 static inline dw_die_ref
5805 lookup_type_die_strip_naming_typedef (tree type)
5806 {
5807 dw_die_ref die = lookup_type_die (type);
5808 return strip_naming_typedef (type, die);
5809 }
5810
5811 /* Equate a DIE to a given type specifier. */
5812
5813 static inline void
5814 equate_type_number_to_die (tree type, dw_die_ref type_die)
5815 {
5816 TYPE_SYMTAB_DIE (type) = type_die;
5817 }
5818
5819 /* Returns a hash value for X (which really is a die_struct). */
5820
5821 inline hashval_t
5822 decl_die_hasher::hash (die_node *x)
5823 {
5824 return (hashval_t) x->decl_id;
5825 }
5826
5827 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5828
5829 inline bool
5830 decl_die_hasher::equal (die_node *x, tree y)
5831 {
5832 return (x->decl_id == DECL_UID (y));
5833 }
5834
5835 /* Return the DIE associated with a given declaration. */
5836
5837 static inline dw_die_ref
5838 lookup_decl_die (tree decl)
5839 {
5840 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5841 NO_INSERT);
5842 if (!die)
5843 return NULL;
5844 if ((*die)->removed)
5845 {
5846 decl_die_table->clear_slot (die);
5847 return NULL;
5848 }
5849 return *die;
5850 }
5851
5852
5853 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5854 style reference.  Return true if we found one referring to a DIE for
5855 DECL, otherwise return false. */
5856
5857 static bool
5858 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5859 unsigned HOST_WIDE_INT *off)
5860 {
5861 dw_die_ref die;
5862
5863 if ((flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
5864 && !decl_die_table)
5865 return false;
5866
5867 if (TREE_CODE (decl) == BLOCK)
5868 die = BLOCK_DIE (decl);
5869 else
5870 die = lookup_decl_die (decl);
5871 if (!die)
5872 return false;
5873
5874 /* During WPA stage and incremental linking we currently use DIEs
5875 to store the decl <-> label + offset map. That's quite inefficient
5876 but it works for now. */
5877 if (flag_wpa
5878 || flag_incremental_link == INCREMENTAL_LINK_LTO)
5879 {
5880 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5881 if (!ref)
5882 {
5883 gcc_assert (die == comp_unit_die ());
5884 return false;
5885 }
5886 *off = ref->die_offset;
5887 *sym = ref->die_id.die_symbol;
5888 return true;
5889 }
5890
5891 /* Similar to get_ref_die_offset_label, but using the "correct"
5892 label. */
5893 *off = die->die_offset;
5894 while (die->die_parent)
5895 die = die->die_parent;
5896 /* For the containing CU DIE we compute a die_symbol in
5897 compute_comp_unit_symbol. */
5898 gcc_assert (die->die_tag == DW_TAG_compile_unit
5899 && die->die_id.die_symbol != NULL);
5900 *sym = die->die_id.die_symbol;
5901 return true;
5902 }
5903
5904 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5905
5906 static void
5907 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5908 const char *symbol, HOST_WIDE_INT offset)
5909 {
5910 /* Create a fake DIE that contains the reference. Don't use
5911 new_die because we don't want to end up in the limbo list. */
5912 dw_die_ref ref = new_die_raw (die->die_tag);
5913 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5914 ref->die_offset = offset;
5915 ref->with_offset = 1;
5916 add_AT_die_ref (die, attr_kind, ref);
5917 }
5918
5919 /* Create a DIE for DECL if required and add a reference to a DIE
5920 at SYMBOL + OFFSET which contains attributes dumped early. */
5921
5922 static void
5923 dwarf2out_register_external_die (tree decl, const char *sym,
5924 unsigned HOST_WIDE_INT off)
5925 {
5926 if (debug_info_level == DINFO_LEVEL_NONE)
5927 return;
5928
5929 if ((flag_wpa
5930 || flag_incremental_link == INCREMENTAL_LINK_LTO) && !decl_die_table)
5931 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5932
5933 dw_die_ref die
5934 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5935 gcc_assert (!die);
5936
5937 tree ctx;
5938 dw_die_ref parent = NULL;
5939 /* Need to look up a DIE for the decl's context - the containing
5940 function or translation unit. */
5941 if (TREE_CODE (decl) == BLOCK)
5942 {
5943 ctx = BLOCK_SUPERCONTEXT (decl);
5944 /* ??? We do not output DIEs for all scopes thus skip as
5945 many DIEs as needed. */
5946 while (TREE_CODE (ctx) == BLOCK
5947 && !BLOCK_DIE (ctx))
5948 ctx = BLOCK_SUPERCONTEXT (ctx);
5949 }
5950 else
5951 ctx = DECL_CONTEXT (decl);
5952 /* Peel types in the context stack. */
5953 while (ctx && TYPE_P (ctx))
5954 ctx = TYPE_CONTEXT (ctx);
5955 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5956 if (debug_info_level <= DINFO_LEVEL_TERSE)
5957 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5958 ctx = DECL_CONTEXT (ctx);
5959 if (ctx)
5960 {
5961 if (TREE_CODE (ctx) == BLOCK)
5962 parent = BLOCK_DIE (ctx);
5963 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5964 /* Keep the 1:1 association during WPA. */
5965 && !flag_wpa
5966 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5967 /* Otherwise all late annotations go to the main CU which
5968 imports the original CUs. */
5969 parent = comp_unit_die ();
5970 else if (TREE_CODE (ctx) == FUNCTION_DECL
5971 && TREE_CODE (decl) != PARM_DECL
5972 && TREE_CODE (decl) != BLOCK)
5973 /* Leave function local entities parent determination to when
5974 we process scope vars. */
5975 ;
5976 else
5977 parent = lookup_decl_die (ctx);
5978 }
5979 else
5980 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5981 Handle this case gracefully by parenting such decls in the CU DIE. */
5982 parent = comp_unit_die ();
5983 /* Create a DIE "stub". */
5984 switch (TREE_CODE (decl))
5985 {
5986 case TRANSLATION_UNIT_DECL:
5987 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
5988 {
5989 die = comp_unit_die ();
5990 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5991 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5992 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5993 to create a DIE for the original CUs. */
5994 return;
5995 }
5996 /* Keep the 1:1 association during WPA. */
5997 die = new_die (DW_TAG_compile_unit, NULL, decl);
5998 break;
5999 case NAMESPACE_DECL:
6000 if (is_fortran (decl))
6001 die = new_die (DW_TAG_module, parent, decl);
6002 else
6003 die = new_die (DW_TAG_namespace, parent, decl);
6004 break;
6005 case FUNCTION_DECL:
6006 die = new_die (DW_TAG_subprogram, parent, decl);
6007 break;
6008 case VAR_DECL:
6009 die = new_die (DW_TAG_variable, parent, decl);
6010 break;
6011 case RESULT_DECL:
6012 die = new_die (DW_TAG_variable, parent, decl);
6013 break;
6014 case PARM_DECL:
6015 die = new_die (DW_TAG_formal_parameter, parent, decl);
6016 break;
6017 case CONST_DECL:
6018 die = new_die (DW_TAG_constant, parent, decl);
6019 break;
6020 case LABEL_DECL:
6021 die = new_die (DW_TAG_label, parent, decl);
6022 break;
6023 case BLOCK:
6024 die = new_die (DW_TAG_lexical_block, parent, decl);
6025 break;
6026 default:
6027 gcc_unreachable ();
6028 }
6029 if (TREE_CODE (decl) == BLOCK)
6030 BLOCK_DIE (decl) = die;
6031 else
6032 equate_decl_number_to_die (decl, die);
6033
6034 /* Add a reference to the DIE providing early debug at SYM + OFF. */
6035 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6036 }
6037
6038 /* Returns a hash value for X (which really is a var_loc_list). */
6039
6040 inline hashval_t
6041 decl_loc_hasher::hash (var_loc_list *x)
6042 {
6043 return (hashval_t) x->decl_id;
6044 }
6045
6046 /* Return nonzero if decl_id of var_loc_list X is the same as
6047 UID of decl *Y. */
6048
6049 inline bool
6050 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6051 {
6052 return (x->decl_id == DECL_UID (y));
6053 }
6054
6055 /* Return the var_loc list associated with a given declaration. */
6056
6057 static inline var_loc_list *
6058 lookup_decl_loc (const_tree decl)
6059 {
6060 if (!decl_loc_table)
6061 return NULL;
6062 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6063 }
6064
6065 /* Returns a hash value for X (which really is a cached_dw_loc_list_list). */
6066
6067 inline hashval_t
6068 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6069 {
6070 return (hashval_t) x->decl_id;
6071 }
6072
6073 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6074 UID of decl *Y. */
6075
6076 inline bool
6077 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6078 {
6079 return (x->decl_id == DECL_UID (y));
6080 }
6081
6082 /* Equate a DIE to a particular declaration. */
6083
6084 static void
6085 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6086 {
6087 unsigned int decl_id = DECL_UID (decl);
6088
6089 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6090 decl_die->decl_id = decl_id;
6091 }
6092
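/* Location piece lists are chains of EXPR_LIST nodes.  Each node records
   the size of its piece either directly in the node's machine-mode field
   (when the size fits below MAX_MACHINE_MODE) or, for larger sizes, as a
   CONST_INT inside a CONCAT whose second operand is the location note.
   The helpers below hide that encoding.  */
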
6093 /* Return how many bits the PIECE EXPR_LIST covers. */
6094
6095 static HOST_WIDE_INT
6096 decl_piece_bitsize (rtx piece)
6097 {
6098 int ret = (int) GET_MODE (piece);
6099 if (ret)
6100 return ret;
6101 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6102 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6103 return INTVAL (XEXP (XEXP (piece, 0), 0));
6104 }
6105
6106 /* Return pointer to the location of location note in PIECE EXPR_LIST. */
6107
6108 static rtx *
6109 decl_piece_varloc_ptr (rtx piece)
6110 {
6111 if ((int) GET_MODE (piece))
6112 return &XEXP (piece, 0);
6113 else
6114 return &XEXP (XEXP (piece, 0), 1);
6115 }
6116
6117 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6118 NEXT is the chain of following piece nodes. */
6119
6120 static rtx_expr_list *
6121 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6122 {
6123 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6124 return alloc_EXPR_LIST (bitsize, loc_note, next);
6125 else
6126 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6127 GEN_INT (bitsize),
6128 loc_note), next);
6129 }
6130
6131 /* Return the rtx that should be stored into the loc field for
6132 LOC_NOTE and BITPOS/BITSIZE. */
6133
6134 static rtx
6135 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6136 HOST_WIDE_INT bitsize)
6137 {
6138 if (bitsize != -1)
6139 {
6140 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6141 if (bitpos != 0)
6142 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6143 }
6144 return loc_note;
6145 }
6146
6147 /* This function either modifies the location piece list *DEST in
6148 place (if SRC and INNER are NULL), or copies the location piece list
6149 *SRC to *DEST while modifying it.  Location BITPOS is modified
6150 to contain LOC_NOTE; any pieces overlapping it are removed (or, when
6151 copying, not copied) and if needed some padding around it is added.
6152 When modifying in place, DEST should point to the EXPR_LIST where
6153 earlier pieces cover PIECE_BITPOS bits; when copying, SRC points
6154 to the start of the whole list and INNER points to the EXPR_LIST
6155 where earlier pieces cover PIECE_BITPOS bits. */
6156
6157 static void
6158 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6159 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6160 HOST_WIDE_INT bitsize, rtx loc_note)
6161 {
6162 HOST_WIDE_INT diff;
6163 bool copy = inner != NULL;
6164
6165 if (copy)
6166 {
6167 /* First copy all nodes preceding the current bitpos. */
6168 while (src != inner)
6169 {
6170 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6171 decl_piece_bitsize (*src), NULL_RTX);
6172 dest = &XEXP (*dest, 1);
6173 src = &XEXP (*src, 1);
6174 }
6175 }
6176 /* Add padding if needed. */
6177 if (bitpos != piece_bitpos)
6178 {
6179 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6180 copy ? NULL_RTX : *dest);
6181 dest = &XEXP (*dest, 1);
6182 }
6183 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6184 {
6185 gcc_assert (!copy);
6186 /* A piece with the correct bitpos and bitsize already exists;
6187 just update the location for it and return. */
6188 *decl_piece_varloc_ptr (*dest) = loc_note;
6189 return;
6190 }
6191 /* Add the piece that changed. */
6192 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6193 dest = &XEXP (*dest, 1);
6194 /* Skip over pieces that overlap it. */
6195 diff = bitpos - piece_bitpos + bitsize;
6196 if (!copy)
6197 src = dest;
6198 while (diff > 0 && *src)
6199 {
6200 rtx piece = *src;
6201 diff -= decl_piece_bitsize (piece);
6202 if (copy)
6203 src = &XEXP (piece, 1);
6204 else
6205 {
6206 *src = XEXP (piece, 1);
6207 free_EXPR_LIST_node (piece);
6208 }
6209 }
6210 /* Add padding if needed. */
6211 if (diff < 0 && *src)
6212 {
6213 if (!copy)
6214 dest = src;
6215 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6216 dest = &XEXP (*dest, 1);
6217 }
6218 if (!copy)
6219 return;
6220 /* Finally copy all nodes following it. */
6221 while (*src)
6222 {
6223 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6224 decl_piece_bitsize (*src), NULL_RTX);
6225 dest = &XEXP (*dest, 1);
6226 src = &XEXP (*src, 1);
6227 }
6228 }
6229
6230 /* Add a variable location node to the linked list for DECL. */
6231
6232 static struct var_loc_node *
6233 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6234 {
6235 unsigned int decl_id;
6236 var_loc_list *temp;
6237 struct var_loc_node *loc = NULL;
6238 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6239
6240 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6241 {
6242 tree realdecl = DECL_DEBUG_EXPR (decl);
6243 if (handled_component_p (realdecl)
6244 || (TREE_CODE (realdecl) == MEM_REF
6245 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6246 {
6247 bool reverse;
6248 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6249 &bitsize, &reverse);
6250 if (!innerdecl
6251 || !DECL_P (innerdecl)
6252 || DECL_IGNORED_P (innerdecl)
6253 || TREE_STATIC (innerdecl)
6254 || bitsize == 0
6255 || bitpos + bitsize > 256)
6256 return NULL;
6257 decl = innerdecl;
6258 }
6259 }
6260
6261 decl_id = DECL_UID (decl);
6262 var_loc_list **slot
6263 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6264 if (*slot == NULL)
6265 {
6266 temp = ggc_cleared_alloc<var_loc_list> ();
6267 temp->decl_id = decl_id;
6268 *slot = temp;
6269 }
6270 else
6271 temp = *slot;
6272
6273 /* For PARM_DECLs try to keep around the original incoming value,
6274 even if that means we'll emit a zero-range .debug_loc entry. */
6275 if (temp->last
6276 && temp->first == temp->last
6277 && TREE_CODE (decl) == PARM_DECL
6278 && NOTE_P (temp->first->loc)
6279 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6280 && DECL_INCOMING_RTL (decl)
6281 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6282 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6283 == GET_CODE (DECL_INCOMING_RTL (decl))
6284 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6285 && (bitsize != -1
6286 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6287 NOTE_VAR_LOCATION_LOC (loc_note))
6288 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6289 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6290 {
6291 loc = ggc_cleared_alloc<var_loc_node> ();
6292 temp->first->next = loc;
6293 temp->last = loc;
6294 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6295 }
6296 else if (temp->last)
6297 {
6298 struct var_loc_node *last = temp->last, *unused = NULL;
6299 rtx *piece_loc = NULL, last_loc_note;
6300 HOST_WIDE_INT piece_bitpos = 0;
6301 if (last->next)
6302 {
6303 last = last->next;
6304 gcc_assert (last->next == NULL);
6305 }
6306 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6307 {
6308 piece_loc = &last->loc;
6309 do
6310 {
6311 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6312 if (piece_bitpos + cur_bitsize > bitpos)
6313 break;
6314 piece_bitpos += cur_bitsize;
6315 piece_loc = &XEXP (*piece_loc, 1);
6316 }
6317 while (*piece_loc);
6318 }
6319 /* TEMP->LAST here is a pointer either to the last-but-one or to the
6320 last element in the chained list; LAST is a pointer to the
6321 last element. */
6322 if (label && strcmp (last->label, label) == 0 && last->view == view)
6323 {
6324 /* For SRA-optimized variables, if there weren't any real
6325 insns since the last note, just modify the last node. */
6326 if (piece_loc != NULL)
6327 {
6328 adjust_piece_list (piece_loc, NULL, NULL,
6329 bitpos, piece_bitpos, bitsize, loc_note);
6330 return NULL;
6331 }
6332 /* If the last note doesn't cover any instructions, remove it. */
6333 if (temp->last != last)
6334 {
6335 temp->last->next = NULL;
6336 unused = last;
6337 last = temp->last;
6338 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6339 }
6340 else
6341 {
6342 gcc_assert (temp->first == temp->last
6343 || (temp->first->next == temp->last
6344 && TREE_CODE (decl) == PARM_DECL));
6345 memset (temp->last, '\0', sizeof (*temp->last));
6346 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6347 return temp->last;
6348 }
6349 }
6350 if (bitsize == -1 && NOTE_P (last->loc))
6351 last_loc_note = last->loc;
6352 else if (piece_loc != NULL
6353 && *piece_loc != NULL_RTX
6354 && piece_bitpos == bitpos
6355 && decl_piece_bitsize (*piece_loc) == bitsize)
6356 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6357 else
6358 last_loc_note = NULL_RTX;
6359 /* If the current location is the same as the end of the list,
6360 and either both or neither of the locations is uninitialized,
6361 we have nothing to do. */
6362 if (last_loc_note == NULL_RTX
6363 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6364 NOTE_VAR_LOCATION_LOC (loc_note)))
6365 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6366 != NOTE_VAR_LOCATION_STATUS (loc_note))
6367 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6368 == VAR_INIT_STATUS_UNINITIALIZED)
6369 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6370 == VAR_INIT_STATUS_UNINITIALIZED))))
6371 {
6372 /* Add LOC to the end of list and update LAST. If the last
6373 element of the list has been removed above, reuse its
6374 memory for the new node, otherwise allocate a new one. */
6375 if (unused)
6376 {
6377 loc = unused;
6378 memset (loc, '\0', sizeof (*loc));
6379 }
6380 else
6381 loc = ggc_cleared_alloc<var_loc_node> ();
6382 if (bitsize == -1 || piece_loc == NULL)
6383 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6384 else
6385 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6386 bitpos, piece_bitpos, bitsize, loc_note);
6387 last->next = loc;
6388 /* Ensure TEMP->LAST will point either to the new last but one
6389 element of the chain, or to the last element in it. */
6390 if (last != temp->last)
6391 temp->last = last;
6392 }
6393 else if (unused)
6394 ggc_free (unused);
6395 }
6396 else
6397 {
6398 loc = ggc_cleared_alloc<var_loc_node> ();
6399 temp->first = loc;
6400 temp->last = loc;
6401 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6402 }
6403 return loc;
6404 }
6405 \f
6406 /* Keep track of the number of spaces used to indent the
6407 output of the debugging routines that print the structure of
6408 the DIE internal representation. */
6409 static int print_indent;
6410
6411 /* Indent the line the number of spaces given by print_indent. */
6412
6413 static inline void
6414 print_spaces (FILE *outfile)
6415 {
6416 fprintf (outfile, "%*s", print_indent, "");
6417 }
6418
6419 /* Print a type signature in hex. */
6420
6421 static inline void
6422 print_signature (FILE *outfile, char *sig)
6423 {
6424 int i;
6425
6426 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6427 fprintf (outfile, "%02x", sig[i] & 0xff);
6428 }
6429
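/* Print the discriminant value DISCR_VALUE to OUTFILE, as an unsigned or
   signed number depending on whether it is flagged as positive.  */
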
6430 static inline void
6431 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6432 {
6433 if (discr_value->pos)
6434 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6435 else
6436 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6437 }
6438
6439 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6440
6441 /* Print the value associated with the VAL DWARF value node to OUTFILE.  If
6442 RECURSE, output location descriptor operations. */
6443
6444 static void
6445 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6446 {
6447 switch (val->val_class)
6448 {
6449 case dw_val_class_addr:
6450 fprintf (outfile, "address");
6451 break;
6452 case dw_val_class_offset:
6453 fprintf (outfile, "offset");
6454 break;
6455 case dw_val_class_loc:
6456 fprintf (outfile, "location descriptor");
6457 if (val->v.val_loc == NULL)
6458 fprintf (outfile, " -> <null>\n");
6459 else if (recurse)
6460 {
6461 fprintf (outfile, ":\n");
6462 print_indent += 4;
6463 print_loc_descr (val->v.val_loc, outfile);
6464 print_indent -= 4;
6465 }
6466 else
6467 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6468 break;
6469 case dw_val_class_loc_list:
6470 fprintf (outfile, "location list -> label:%s",
6471 val->v.val_loc_list->ll_symbol);
6472 break;
6473 case dw_val_class_view_list:
6474 val = view_list_to_loc_list_val_node (val);
6475 fprintf (outfile, "location list with views -> labels:%s and %s",
6476 val->v.val_loc_list->ll_symbol,
6477 val->v.val_loc_list->vl_symbol);
6478 break;
6479 case dw_val_class_range_list:
6480 fprintf (outfile, "range list");
6481 break;
6482 case dw_val_class_const:
6483 case dw_val_class_const_implicit:
6484 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6485 break;
6486 case dw_val_class_unsigned_const:
6487 case dw_val_class_unsigned_const_implicit:
6488 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6489 break;
6490 case dw_val_class_const_double:
6491 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6492 HOST_WIDE_INT_PRINT_UNSIGNED")",
6493 val->v.val_double.high,
6494 val->v.val_double.low);
6495 break;
6496 case dw_val_class_wide_int:
6497 {
6498 int i = val->v.val_wide->get_len ();
6499 fprintf (outfile, "constant (");
6500 gcc_assert (i > 0);
6501 if (val->v.val_wide->elt (i - 1) == 0)
6502 fprintf (outfile, "0x");
6503 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6504 val->v.val_wide->elt (--i));
6505 while (--i >= 0)
6506 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6507 val->v.val_wide->elt (i));
6508 fprintf (outfile, ")");
6509 break;
6510 }
6511 case dw_val_class_vec:
6512 fprintf (outfile, "floating-point or vector constant");
6513 break;
6514 case dw_val_class_flag:
6515 fprintf (outfile, "%u", val->v.val_flag);
6516 break;
6517 case dw_val_class_die_ref:
6518 if (val->v.val_die_ref.die != NULL)
6519 {
6520 dw_die_ref die = val->v.val_die_ref.die;
6521
6522 if (die->comdat_type_p)
6523 {
6524 fprintf (outfile, "die -> signature: ");
6525 print_signature (outfile,
6526 die->die_id.die_type_node->signature);
6527 }
6528 else if (die->die_id.die_symbol)
6529 {
6530 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6531 if (die->with_offset)
6532 fprintf (outfile, " + %ld", die->die_offset);
6533 }
6534 else
6535 fprintf (outfile, "die -> %ld", die->die_offset);
6536 fprintf (outfile, " (%p)", (void *) die);
6537 }
6538 else
6539 fprintf (outfile, "die -> <null>");
6540 break;
6541 case dw_val_class_vms_delta:
6542 fprintf (outfile, "delta: @slotcount(%s-%s)",
6543 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6544 break;
6545 case dw_val_class_symview:
6546 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6547 break;
6548 case dw_val_class_lbl_id:
6549 case dw_val_class_lineptr:
6550 case dw_val_class_macptr:
6551 case dw_val_class_loclistsptr:
6552 case dw_val_class_high_pc:
6553 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6554 break;
6555 case dw_val_class_str:
6556 if (val->v.val_str->str != NULL)
6557 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6558 else
6559 fprintf (outfile, "<null>");
6560 break;
6561 case dw_val_class_file:
6562 case dw_val_class_file_implicit:
6563 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6564 val->v.val_file->emitted_number);
6565 break;
6566 case dw_val_class_data8:
6567 {
6568 int i;
6569
6570 for (i = 0; i < 8; i++)
6571 fprintf (outfile, "%02x", val->v.val_data8[i]);
6572 break;
6573 }
6574 case dw_val_class_discr_value:
6575 print_discr_value (outfile, &val->v.val_discr_value);
6576 break;
6577 case dw_val_class_discr_list:
6578 for (dw_discr_list_ref node = val->v.val_discr_list;
6579 node != NULL;
6580 node = node->dw_discr_next)
6581 {
6582 if (node->dw_discr_range)
6583 {
6584 print_discr_value (outfile, &node->dw_discr_lower_bound);
6585 fprintf (outfile, " .. ");
6586 print_discr_value (outfile, &node->dw_discr_upper_bound);
6587 }
6588 else
6589 print_discr_value (outfile, &node->dw_discr_lower_bound);
6590
6591 if (node->dw_discr_next != NULL)
6592 fprintf (outfile, " | ");
6593 }
6594 default:
6595 break;
6596 }
6597 }
6598
6599 /* Likewise, for a DIE attribute. */
6600
6601 static void
6602 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6603 {
6604 print_dw_val (&a->dw_attr_val, recurse, outfile);
6605 }
6606
6607
6608 /* Print the list of operations in the LOC location description to OUTFILE.  This
6609 routine is a debugging aid only. */
6610
6611 static void
6612 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6613 {
6614 dw_loc_descr_ref l = loc;
6615
6616 if (loc == NULL)
6617 {
6618 print_spaces (outfile);
6619 fprintf (outfile, "<null>\n");
6620 return;
6621 }
6622
6623 for (l = loc; l != NULL; l = l->dw_loc_next)
6624 {
6625 print_spaces (outfile);
6626 fprintf (outfile, "(%p) %s",
6627 (void *) l,
6628 dwarf_stack_op_name (l->dw_loc_opc));
6629 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6630 {
6631 fprintf (outfile, " ");
6632 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6633 }
6634 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6635 {
6636 fprintf (outfile, ", ");
6637 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6638 }
6639 fprintf (outfile, "\n");
6640 }
6641 }
6642
6643 /* Print the information associated with a given DIE, and its children.
6644 This routine is a debugging aid only. */
6645
6646 static void
6647 print_die (dw_die_ref die, FILE *outfile)
6648 {
6649 dw_attr_node *a;
6650 dw_die_ref c;
6651 unsigned ix;
6652
6653 print_spaces (outfile);
6654 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6655 die->die_offset, dwarf_tag_name (die->die_tag),
6656 (void*) die);
6657 print_spaces (outfile);
6658 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6659 fprintf (outfile, " offset: %ld", die->die_offset);
6660 fprintf (outfile, " mark: %d\n", die->die_mark);
6661
6662 if (die->comdat_type_p)
6663 {
6664 print_spaces (outfile);
6665 fprintf (outfile, " signature: ");
6666 print_signature (outfile, die->die_id.die_type_node->signature);
6667 fprintf (outfile, "\n");
6668 }
6669
6670 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6671 {
6672 print_spaces (outfile);
6673 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6674
6675 print_attribute (a, true, outfile);
6676 fprintf (outfile, "\n");
6677 }
6678
6679 if (die->die_child != NULL)
6680 {
6681 print_indent += 4;
6682 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6683 print_indent -= 4;
6684 }
6685 if (print_indent == 0)
6686 fprintf (outfile, "\n");
6687 }
6688
6689 /* Print the list of operations in the LOC location description. */
6690
6691 DEBUG_FUNCTION void
6692 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6693 {
6694 print_loc_descr (loc, stderr);
6695 }
6696
6697 /* Print the information collected for a given DIE. */
6698
6699 DEBUG_FUNCTION void
6700 debug_dwarf_die (dw_die_ref die)
6701 {
6702 print_die (die, stderr);
6703 }
6704
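/* Overloads usable from the debugger: print the information collected for
   the DIE given by reference or by pointer to stderr.  */
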
6705 DEBUG_FUNCTION void
6706 debug (die_struct &ref)
6707 {
6708 print_die (&ref, stderr);
6709 }
6710
6711 DEBUG_FUNCTION void
6712 debug (die_struct *ptr)
6713 {
6714 if (ptr)
6715 debug (*ptr);
6716 else
6717 fprintf (stderr, "<nil>\n");
6718 }
6719
6720
6721 /* Print all DWARF information collected for the compilation unit.
6722 This routine is a debugging aid only. */
6723
6724 DEBUG_FUNCTION void
6725 debug_dwarf (void)
6726 {
6727 print_indent = 0;
6728 print_die (comp_unit_die (), stderr);
6729 }
6730
6731 /* Verify the DIE tree structure. */
6732
6733 DEBUG_FUNCTION void
6734 verify_die (dw_die_ref die)
6735 {
6736 gcc_assert (!die->die_mark);
6737 if (die->die_parent == NULL
6738 && die->die_sib == NULL)
6739 return;
6740 /* Verify the die_sib list is cyclic. */
6741 dw_die_ref x = die;
6742 do
6743 {
6744 x->die_mark = 1;
6745 x = x->die_sib;
6746 }
6747 while (x && !x->die_mark);
6748 gcc_assert (x == die);
6749 x = die;
6750 do
6751 {
6752 /* Verify all dies have the same parent. */
6753 gcc_assert (x->die_parent == die->die_parent);
6754 if (x->die_child)
6755 {
6756 /* Verify the child has the proper parent and recurse. */
6757 gcc_assert (x->die_child->die_parent == x);
6758 verify_die (x->die_child);
6759 }
6760 x->die_mark = 0;
6761 x = x->die_sib;
6762 }
6763 while (x && x->die_mark);
6764 }
6765
6766 /* Sanity checks on DIEs. */
6767
6768 static void
6769 check_die (dw_die_ref die)
6770 {
6771 unsigned ix;
6772 dw_attr_node *a;
6773 bool inline_found = false;
6774 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6775 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6776 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6777 {
6778 switch (a->dw_attr)
6779 {
6780 case DW_AT_inline:
6781 if (a->dw_attr_val.v.val_unsigned)
6782 inline_found = true;
6783 break;
6784 case DW_AT_location:
6785 ++n_location;
6786 break;
6787 case DW_AT_low_pc:
6788 ++n_low_pc;
6789 break;
6790 case DW_AT_high_pc:
6791 ++n_high_pc;
6792 break;
6793 case DW_AT_artificial:
6794 ++n_artificial;
6795 break;
6796 case DW_AT_decl_column:
6797 ++n_decl_column;
6798 break;
6799 case DW_AT_decl_line:
6800 ++n_decl_line;
6801 break;
6802 case DW_AT_decl_file:
6803 ++n_decl_file;
6804 break;
6805 default:
6806 break;
6807 }
6808 }
6809 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6810 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6811 {
6812 fprintf (stderr, "Duplicate attributes in DIE:\n");
6813 debug_dwarf_die (die);
6814 gcc_unreachable ();
6815 }
6816 if (inline_found)
6817 {
6818 /* A debugging information entry that is a member of an abstract
6819 instance tree [that has DW_AT_inline] should not contain any
6820 attributes which describe aspects of the subroutine which vary
6821 between distinct inlined expansions or distinct out-of-line
6822 expansions. */
6823 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6824 gcc_assert (a->dw_attr != DW_AT_low_pc
6825 && a->dw_attr != DW_AT_high_pc
6826 && a->dw_attr != DW_AT_location
6827 && a->dw_attr != DW_AT_frame_base
6828 && a->dw_attr != DW_AT_call_all_calls
6829 && a->dw_attr != DW_AT_GNU_all_call_sites);
6830 }
6831 }
6832 \f
6833 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6834 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6835 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6836
6837 /* Calculate the checksum of a location expression. */
6838
6839 static inline void
6840 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6841 {
6842 int tem;
6843 inchash::hash hstate;
6844 hashval_t hash;
6845
6846 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6847 CHECKSUM (tem);
6848 hash_loc_operands (loc, hstate);
6849 hash = hstate.end();
6850 CHECKSUM (hash);
6851 }
6852
6853 /* Calculate the checksum of an attribute. */
6854
6855 static void
6856 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6857 {
6858 dw_loc_descr_ref loc;
6859 rtx r;
6860
6861 CHECKSUM (at->dw_attr);
6862
6863 /* We don't care that this was compiled with a different compiler
6864 snapshot; if the output is the same, that's what matters. */
6865 if (at->dw_attr == DW_AT_producer)
6866 return;
6867
6868 switch (AT_class (at))
6869 {
6870 case dw_val_class_const:
6871 case dw_val_class_const_implicit:
6872 CHECKSUM (at->dw_attr_val.v.val_int);
6873 break;
6874 case dw_val_class_unsigned_const:
6875 case dw_val_class_unsigned_const_implicit:
6876 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6877 break;
6878 case dw_val_class_const_double:
6879 CHECKSUM (at->dw_attr_val.v.val_double);
6880 break;
6881 case dw_val_class_wide_int:
6882 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6883 get_full_len (*at->dw_attr_val.v.val_wide)
6884 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6885 break;
6886 case dw_val_class_vec:
6887 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6888 (at->dw_attr_val.v.val_vec.length
6889 * at->dw_attr_val.v.val_vec.elt_size));
6890 break;
6891 case dw_val_class_flag:
6892 CHECKSUM (at->dw_attr_val.v.val_flag);
6893 break;
6894 case dw_val_class_str:
6895 CHECKSUM_STRING (AT_string (at));
6896 break;
6897
6898 case dw_val_class_addr:
6899 r = AT_addr (at);
6900 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6901 CHECKSUM_STRING (XSTR (r, 0));
6902 break;
6903
6904 case dw_val_class_offset:
6905 CHECKSUM (at->dw_attr_val.v.val_offset);
6906 break;
6907
6908 case dw_val_class_loc:
6909 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6910 loc_checksum (loc, ctx);
6911 break;
6912
6913 case dw_val_class_die_ref:
6914 die_checksum (AT_ref (at), ctx, mark);
6915 break;
6916
6917 case dw_val_class_fde_ref:
6918 case dw_val_class_vms_delta:
6919 case dw_val_class_symview:
6920 case dw_val_class_lbl_id:
6921 case dw_val_class_lineptr:
6922 case dw_val_class_macptr:
6923 case dw_val_class_loclistsptr:
6924 case dw_val_class_high_pc:
6925 break;
6926
6927 case dw_val_class_file:
6928 case dw_val_class_file_implicit:
6929 CHECKSUM_STRING (AT_file (at)->filename);
6930 break;
6931
6932 case dw_val_class_data8:
6933 CHECKSUM (at->dw_attr_val.v.val_data8);
6934 break;
6935
6936 default:
6937 break;
6938 }
6939 }
6940
6941 /* Calculate the checksum of a DIE. */
6942
6943 static void
6944 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6945 {
6946 dw_die_ref c;
6947 dw_attr_node *a;
6948 unsigned ix;
6949
6950 /* To avoid infinite recursion. */
6951 if (die->die_mark)
6952 {
6953 CHECKSUM (die->die_mark);
6954 return;
6955 }
6956 die->die_mark = ++(*mark);
6957
6958 CHECKSUM (die->die_tag);
6959
6960 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6961 attr_checksum (a, ctx, mark);
6962
6963 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6964 }
6965
6966 #undef CHECKSUM
6967 #undef CHECKSUM_BLOCK
6968 #undef CHECKSUM_STRING
6969
6970 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6971 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6972 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6973 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6974 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6975 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6976 #define CHECKSUM_ATTR(FOO) \
6977 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6978
6979 /* Calculate the checksum of a number in signed LEB128 format. */
6980
6981 static void
6982 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6983 {
6984 unsigned char byte;
6985 bool more;
6986
6987 while (1)
6988 {
6989 byte = (value & 0x7f);
6990 value >>= 7;
6991 more = !((value == 0 && (byte & 0x40) == 0)
6992 || (value == -1 && (byte & 0x40) != 0));
6993 if (more)
6994 byte |= 0x80;
6995 CHECKSUM (byte);
6996 if (!more)
6997 break;
6998 }
6999 }
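
/* For example, checksum_sleb128 (-123456, ctx) feeds the three bytes
   0xc0 0xbb 0x78, the SLEB128 encoding of -123456, into CTX.  */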
7000
7001 /* Calculate the checksum of a number in unsigned LEB128 format. */
7002
7003 static void
7004 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7005 {
7006 while (1)
7007 {
7008 unsigned char byte = (value & 0x7f);
7009 value >>= 7;
7010 if (value != 0)
7011 /* More bytes to follow. */
7012 byte |= 0x80;
7013 CHECKSUM (byte);
7014 if (value == 0)
7015 break;
7016 }
7017 }
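
/* For example, checksum_uleb128 (624485, ctx) feeds the three bytes
   0xe5 0x8e 0x26, the ULEB128 encoding of 624485, into CTX.  */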
7018
7019 /* Checksum the context of the DIE. This adds the names of any
7020 surrounding namespaces or structures to the checksum. */
7021
7022 static void
7023 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7024 {
7025 const char *name;
7026 dw_die_ref spec;
7027 int tag = die->die_tag;
7028
7029 if (tag != DW_TAG_namespace
7030 && tag != DW_TAG_structure_type
7031 && tag != DW_TAG_class_type)
7032 return;
7033
7034 name = get_AT_string (die, DW_AT_name);
7035
7036 spec = get_AT_ref (die, DW_AT_specification);
7037 if (spec != NULL)
7038 die = spec;
7039
7040 if (die->die_parent != NULL)
7041 checksum_die_context (die->die_parent, ctx);
7042
7043 CHECKSUM_ULEB128 ('C');
7044 CHECKSUM_ULEB128 (tag);
7045 if (name != NULL)
7046 CHECKSUM_STRING (name);
7047 }
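
/* For example, for a type Inner nested in struct Outer, itself inside
   namespace N, the context contribution is 'C' DW_TAG_namespace "N"
   followed by 'C' DW_TAG_structure_type "Outer"; the DIE for Inner
   itself is checksummed separately by the caller.  */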
7048
7049 /* Calculate the checksum of a location expression. */
7050
7051 static inline void
7052 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7053 {
7054 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7055 were emitted as a DW_FORM_sdata instead of a location expression. */
7056 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7057 {
7058 CHECKSUM_ULEB128 (DW_FORM_sdata);
7059 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7060 return;
7061 }
7062
7063 /* Otherwise, just checksum the raw location expression. */
7064 while (loc != NULL)
7065 {
7066 inchash::hash hstate;
7067 hashval_t hash;
7068
7069 CHECKSUM_ULEB128 (loc->dtprel);
7070 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7071 hash_loc_operands (loc, hstate);
7072 hash = hstate.end ();
7073 CHECKSUM (hash);
7074 loc = loc->dw_loc_next;
7075 }
7076 }
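
/* As a result, a DW_AT_data_member_location holding the single op
   DW_OP_plus_uconst 8 hashes identically to a constant 8 emitted with
   DW_FORM_sdata, so the signature does not depend on which of the two
   encodings was chosen for the attribute.  */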
7077
7078 /* Calculate the checksum of an attribute. */
7079
7080 static void
7081 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7082 struct md5_ctx *ctx, int *mark)
7083 {
7084 dw_loc_descr_ref loc;
7085 rtx r;
7086
7087 if (AT_class (at) == dw_val_class_die_ref)
7088 {
7089 dw_die_ref target_die = AT_ref (at);
7090
7091 /* For pointer and reference types, we checksum only the (qualified)
7092 name of the target type (if there is a name). For friend entries,
7093 we checksum only the (qualified) name of the target type or function.
7094 This allows the checksum to remain the same whether the target type
7095 is complete or not. */
7096 if ((at->dw_attr == DW_AT_type
7097 && (tag == DW_TAG_pointer_type
7098 || tag == DW_TAG_reference_type
7099 || tag == DW_TAG_rvalue_reference_type
7100 || tag == DW_TAG_ptr_to_member_type))
7101 || (at->dw_attr == DW_AT_friend
7102 && tag == DW_TAG_friend))
7103 {
7104 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7105
7106 if (name_attr != NULL)
7107 {
7108 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7109
7110 if (decl == NULL)
7111 decl = target_die;
7112 CHECKSUM_ULEB128 ('N');
7113 CHECKSUM_ULEB128 (at->dw_attr);
7114 if (decl->die_parent != NULL)
7115 checksum_die_context (decl->die_parent, ctx);
7116 CHECKSUM_ULEB128 ('E');
7117 CHECKSUM_STRING (AT_string (name_attr));
7118 return;
7119 }
7120 }
7121
7122 /* For all other references to another DIE, we check to see if the
7123 target DIE has already been visited. If it has, we emit a
7124 backward reference; if not, we descend recursively. */
7125 if (target_die->die_mark > 0)
7126 {
7127 CHECKSUM_ULEB128 ('R');
7128 CHECKSUM_ULEB128 (at->dw_attr);
7129 CHECKSUM_ULEB128 (target_die->die_mark);
7130 }
7131 else
7132 {
7133 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7134
7135 if (decl == NULL)
7136 decl = target_die;
7137 target_die->die_mark = ++(*mark);
7138 CHECKSUM_ULEB128 ('T');
7139 CHECKSUM_ULEB128 (at->dw_attr);
7140 if (decl->die_parent != NULL)
7141 checksum_die_context (decl->die_parent, ctx);
7142 die_checksum_ordered (target_die, ctx, mark);
7143 }
7144 return;
7145 }
7146
7147 CHECKSUM_ULEB128 ('A');
7148 CHECKSUM_ULEB128 (at->dw_attr);
7149
7150 switch (AT_class (at))
7151 {
7152 case dw_val_class_const:
7153 case dw_val_class_const_implicit:
7154 CHECKSUM_ULEB128 (DW_FORM_sdata);
7155 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7156 break;
7157
7158 case dw_val_class_unsigned_const:
7159 case dw_val_class_unsigned_const_implicit:
7160 CHECKSUM_ULEB128 (DW_FORM_sdata);
7161 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7162 break;
7163
7164 case dw_val_class_const_double:
7165 CHECKSUM_ULEB128 (DW_FORM_block);
7166 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7167 CHECKSUM (at->dw_attr_val.v.val_double);
7168 break;
7169
7170 case dw_val_class_wide_int:
7171 CHECKSUM_ULEB128 (DW_FORM_block);
7172 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7173 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7174 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7175 get_full_len (*at->dw_attr_val.v.val_wide)
7176 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7177 break;
7178
7179 case dw_val_class_vec:
7180 CHECKSUM_ULEB128 (DW_FORM_block);
7181 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7182 * at->dw_attr_val.v.val_vec.elt_size);
7183 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7184 (at->dw_attr_val.v.val_vec.length
7185 * at->dw_attr_val.v.val_vec.elt_size));
7186 break;
7187
7188 case dw_val_class_flag:
7189 CHECKSUM_ULEB128 (DW_FORM_flag);
7190 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7191 break;
7192
7193 case dw_val_class_str:
7194 CHECKSUM_ULEB128 (DW_FORM_string);
7195 CHECKSUM_STRING (AT_string (at));
7196 break;
7197
7198 case dw_val_class_addr:
7199 r = AT_addr (at);
7200 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7201 CHECKSUM_ULEB128 (DW_FORM_string);
7202 CHECKSUM_STRING (XSTR (r, 0));
7203 break;
7204
7205 case dw_val_class_offset:
7206 CHECKSUM_ULEB128 (DW_FORM_sdata);
7207 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7208 break;
7209
7210 case dw_val_class_loc:
7211 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7212 loc_checksum_ordered (loc, ctx);
7213 break;
7214
7215 case dw_val_class_fde_ref:
7216 case dw_val_class_symview:
7217 case dw_val_class_lbl_id:
7218 case dw_val_class_lineptr:
7219 case dw_val_class_macptr:
7220 case dw_val_class_loclistsptr:
7221 case dw_val_class_high_pc:
7222 break;
7223
7224 case dw_val_class_file:
7225 case dw_val_class_file_implicit:
7226 CHECKSUM_ULEB128 (DW_FORM_string);
7227 CHECKSUM_STRING (AT_file (at)->filename);
7228 break;
7229
7230 case dw_val_class_data8:
7231 CHECKSUM (at->dw_attr_val.v.val_data8);
7232 break;
7233
7234 default:
7235 break;
7236 }
7237 }
7238
7239 struct checksum_attributes
7240 {
7241 dw_attr_node *at_name;
7242 dw_attr_node *at_type;
7243 dw_attr_node *at_friend;
7244 dw_attr_node *at_accessibility;
7245 dw_attr_node *at_address_class;
7246 dw_attr_node *at_alignment;
7247 dw_attr_node *at_allocated;
7248 dw_attr_node *at_artificial;
7249 dw_attr_node *at_associated;
7250 dw_attr_node *at_binary_scale;
7251 dw_attr_node *at_bit_offset;
7252 dw_attr_node *at_bit_size;
7253 dw_attr_node *at_bit_stride;
7254 dw_attr_node *at_byte_size;
7255 dw_attr_node *at_byte_stride;
7256 dw_attr_node *at_const_value;
7257 dw_attr_node *at_containing_type;
7258 dw_attr_node *at_count;
7259 dw_attr_node *at_data_location;
7260 dw_attr_node *at_data_member_location;
7261 dw_attr_node *at_decimal_scale;
7262 dw_attr_node *at_decimal_sign;
7263 dw_attr_node *at_default_value;
7264 dw_attr_node *at_digit_count;
7265 dw_attr_node *at_discr;
7266 dw_attr_node *at_discr_list;
7267 dw_attr_node *at_discr_value;
7268 dw_attr_node *at_encoding;
7269 dw_attr_node *at_endianity;
7270 dw_attr_node *at_explicit;
7271 dw_attr_node *at_is_optional;
7272 dw_attr_node *at_location;
7273 dw_attr_node *at_lower_bound;
7274 dw_attr_node *at_mutable;
7275 dw_attr_node *at_ordering;
7276 dw_attr_node *at_picture_string;
7277 dw_attr_node *at_prototyped;
7278 dw_attr_node *at_small;
7279 dw_attr_node *at_segment;
7280 dw_attr_node *at_string_length;
7281 dw_attr_node *at_string_length_bit_size;
7282 dw_attr_node *at_string_length_byte_size;
7283 dw_attr_node *at_threads_scaled;
7284 dw_attr_node *at_upper_bound;
7285 dw_attr_node *at_use_location;
7286 dw_attr_node *at_use_UTF8;
7287 dw_attr_node *at_variable_parameter;
7288 dw_attr_node *at_virtuality;
7289 dw_attr_node *at_visibility;
7290 dw_attr_node *at_vtable_elem_location;
7291 };
7292
7293 /* Collect the attributes that we will want to use for the checksum. */
7294
7295 static void
7296 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7297 {
7298 dw_attr_node *a;
7299 unsigned ix;
7300
7301 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7302 {
7303 switch (a->dw_attr)
7304 {
7305 case DW_AT_name:
7306 attrs->at_name = a;
7307 break;
7308 case DW_AT_type:
7309 attrs->at_type = a;
7310 break;
7311 case DW_AT_friend:
7312 attrs->at_friend = a;
7313 break;
7314 case DW_AT_accessibility:
7315 attrs->at_accessibility = a;
7316 break;
7317 case DW_AT_address_class:
7318 attrs->at_address_class = a;
7319 break;
7320 case DW_AT_alignment:
7321 attrs->at_alignment = a;
7322 break;
7323 case DW_AT_allocated:
7324 attrs->at_allocated = a;
7325 break;
7326 case DW_AT_artificial:
7327 attrs->at_artificial = a;
7328 break;
7329 case DW_AT_associated:
7330 attrs->at_associated = a;
7331 break;
7332 case DW_AT_binary_scale:
7333 attrs->at_binary_scale = a;
7334 break;
7335 case DW_AT_bit_offset:
7336 attrs->at_bit_offset = a;
7337 break;
7338 case DW_AT_bit_size:
7339 attrs->at_bit_size = a;
7340 break;
7341 case DW_AT_bit_stride:
7342 attrs->at_bit_stride = a;
7343 break;
7344 case DW_AT_byte_size:
7345 attrs->at_byte_size = a;
7346 break;
7347 case DW_AT_byte_stride:
7348 attrs->at_byte_stride = a;
7349 break;
7350 case DW_AT_const_value:
7351 attrs->at_const_value = a;
7352 break;
7353 case DW_AT_containing_type:
7354 attrs->at_containing_type = a;
7355 break;
7356 case DW_AT_count:
7357 attrs->at_count = a;
7358 break;
7359 case DW_AT_data_location:
7360 attrs->at_data_location = a;
7361 break;
7362 case DW_AT_data_member_location:
7363 attrs->at_data_member_location = a;
7364 break;
7365 case DW_AT_decimal_scale:
7366 attrs->at_decimal_scale = a;
7367 break;
7368 case DW_AT_decimal_sign:
7369 attrs->at_decimal_sign = a;
7370 break;
7371 case DW_AT_default_value:
7372 attrs->at_default_value = a;
7373 break;
7374 case DW_AT_digit_count:
7375 attrs->at_digit_count = a;
7376 break;
7377 case DW_AT_discr:
7378 attrs->at_discr = a;
7379 break;
7380 case DW_AT_discr_list:
7381 attrs->at_discr_list = a;
7382 break;
7383 case DW_AT_discr_value:
7384 attrs->at_discr_value = a;
7385 break;
7386 case DW_AT_encoding:
7387 attrs->at_encoding = a;
7388 break;
7389 case DW_AT_endianity:
7390 attrs->at_endianity = a;
7391 break;
7392 case DW_AT_explicit:
7393 attrs->at_explicit = a;
7394 break;
7395 case DW_AT_is_optional:
7396 attrs->at_is_optional = a;
7397 break;
7398 case DW_AT_location:
7399 attrs->at_location = a;
7400 break;
7401 case DW_AT_lower_bound:
7402 attrs->at_lower_bound = a;
7403 break;
7404 case DW_AT_mutable:
7405 attrs->at_mutable = a;
7406 break;
7407 case DW_AT_ordering:
7408 attrs->at_ordering = a;
7409 break;
7410 case DW_AT_picture_string:
7411 attrs->at_picture_string = a;
7412 break;
7413 case DW_AT_prototyped:
7414 attrs->at_prototyped = a;
7415 break;
7416 case DW_AT_small:
7417 attrs->at_small = a;
7418 break;
7419 case DW_AT_segment:
7420 attrs->at_segment = a;
7421 break;
7422 case DW_AT_string_length:
7423 attrs->at_string_length = a;
7424 break;
7425 case DW_AT_string_length_bit_size:
7426 attrs->at_string_length_bit_size = a;
7427 break;
7428 case DW_AT_string_length_byte_size:
7429 attrs->at_string_length_byte_size = a;
7430 break;
7431 case DW_AT_threads_scaled:
7432 attrs->at_threads_scaled = a;
7433 break;
7434 case DW_AT_upper_bound:
7435 attrs->at_upper_bound = a;
7436 break;
7437 case DW_AT_use_location:
7438 attrs->at_use_location = a;
7439 break;
7440 case DW_AT_use_UTF8:
7441 attrs->at_use_UTF8 = a;
7442 break;
7443 case DW_AT_variable_parameter:
7444 attrs->at_variable_parameter = a;
7445 break;
7446 case DW_AT_virtuality:
7447 attrs->at_virtuality = a;
7448 break;
7449 case DW_AT_visibility:
7450 attrs->at_visibility = a;
7451 break;
7452 case DW_AT_vtable_elem_location:
7453 attrs->at_vtable_elem_location = a;
7454 break;
7455 default:
7456 break;
7457 }
7458 }
7459 }
7460
7461 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7462
7463 static void
7464 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7465 {
7466 dw_die_ref c;
7467 dw_die_ref decl;
7468 struct checksum_attributes attrs;
7469
7470 CHECKSUM_ULEB128 ('D');
7471 CHECKSUM_ULEB128 (die->die_tag);
7472
7473 memset (&attrs, 0, sizeof (attrs));
7474
7475 decl = get_AT_ref (die, DW_AT_specification);
7476 if (decl != NULL)
7477 collect_checksum_attributes (&attrs, decl);
7478 collect_checksum_attributes (&attrs, die);
7479
7480 CHECKSUM_ATTR (attrs.at_name);
7481 CHECKSUM_ATTR (attrs.at_accessibility);
7482 CHECKSUM_ATTR (attrs.at_address_class);
7483 CHECKSUM_ATTR (attrs.at_allocated);
7484 CHECKSUM_ATTR (attrs.at_artificial);
7485 CHECKSUM_ATTR (attrs.at_associated);
7486 CHECKSUM_ATTR (attrs.at_binary_scale);
7487 CHECKSUM_ATTR (attrs.at_bit_offset);
7488 CHECKSUM_ATTR (attrs.at_bit_size);
7489 CHECKSUM_ATTR (attrs.at_bit_stride);
7490 CHECKSUM_ATTR (attrs.at_byte_size);
7491 CHECKSUM_ATTR (attrs.at_byte_stride);
7492 CHECKSUM_ATTR (attrs.at_const_value);
7493 CHECKSUM_ATTR (attrs.at_containing_type);
7494 CHECKSUM_ATTR (attrs.at_count);
7495 CHECKSUM_ATTR (attrs.at_data_location);
7496 CHECKSUM_ATTR (attrs.at_data_member_location);
7497 CHECKSUM_ATTR (attrs.at_decimal_scale);
7498 CHECKSUM_ATTR (attrs.at_decimal_sign);
7499 CHECKSUM_ATTR (attrs.at_default_value);
7500 CHECKSUM_ATTR (attrs.at_digit_count);
7501 CHECKSUM_ATTR (attrs.at_discr);
7502 CHECKSUM_ATTR (attrs.at_discr_list);
7503 CHECKSUM_ATTR (attrs.at_discr_value);
7504 CHECKSUM_ATTR (attrs.at_encoding);
7505 CHECKSUM_ATTR (attrs.at_endianity);
7506 CHECKSUM_ATTR (attrs.at_explicit);
7507 CHECKSUM_ATTR (attrs.at_is_optional);
7508 CHECKSUM_ATTR (attrs.at_location);
7509 CHECKSUM_ATTR (attrs.at_lower_bound);
7510 CHECKSUM_ATTR (attrs.at_mutable);
7511 CHECKSUM_ATTR (attrs.at_ordering);
7512 CHECKSUM_ATTR (attrs.at_picture_string);
7513 CHECKSUM_ATTR (attrs.at_prototyped);
7514 CHECKSUM_ATTR (attrs.at_small);
7515 CHECKSUM_ATTR (attrs.at_segment);
7516 CHECKSUM_ATTR (attrs.at_string_length);
7517 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7518 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7519 CHECKSUM_ATTR (attrs.at_threads_scaled);
7520 CHECKSUM_ATTR (attrs.at_upper_bound);
7521 CHECKSUM_ATTR (attrs.at_use_location);
7522 CHECKSUM_ATTR (attrs.at_use_UTF8);
7523 CHECKSUM_ATTR (attrs.at_variable_parameter);
7524 CHECKSUM_ATTR (attrs.at_virtuality);
7525 CHECKSUM_ATTR (attrs.at_visibility);
7526 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7527 CHECKSUM_ATTR (attrs.at_type);
7528 CHECKSUM_ATTR (attrs.at_friend);
7529 CHECKSUM_ATTR (attrs.at_alignment);
7530
7531 /* Checksum the child DIEs. */
7532 c = die->die_child;
7533 if (c) do {
7534 dw_attr_node *name_attr;
7535
7536 c = c->die_sib;
7537 name_attr = get_AT (c, DW_AT_name);
7538 if (is_template_instantiation (c))
7539 {
7540 /* Ignore instantiations of member type and function templates. */
7541 }
7542 else if (name_attr != NULL
7543 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7544 {
7545 /* Use a shallow checksum for named nested types and member
7546 functions. */
7547 CHECKSUM_ULEB128 ('S');
7548 CHECKSUM_ULEB128 (c->die_tag);
7549 CHECKSUM_STRING (AT_string (name_attr));
7550 }
7551 else
7552 {
7553 /* Use a deep checksum for other children. */
7554 /* Mark this DIE so it gets processed when unmarking. */
7555 if (c->die_mark == 0)
7556 c->die_mark = -1;
7557 die_checksum_ordered (c, ctx, mark);
7558 }
7559 } while (c != die->die_child);
7560
7561 CHECKSUM_ULEB128 (0);
7562 }
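
/* Because named nested types and member functions receive only the shallow
   'S' checksum above, changes to their members or bodies do not alter the
   signature of the enclosing type.  */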
7563
7564 /* Add a type name and tag to a hash. */
7565 static void
7566 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7567 {
7568 CHECKSUM_ULEB128 (tag);
7569 CHECKSUM_STRING (name);
7570 }
7571
7572 #undef CHECKSUM
7573 #undef CHECKSUM_STRING
7574 #undef CHECKSUM_ATTR
7575 #undef CHECKSUM_SLEB128
7576 #undef CHECKSUM_ULEB128
7577
7578 /* Generate the type signature for DIE. This is computed by generating an
7579 MD5 checksum over the DIE's tag, its relevant attributes, and its
7580 children. Attributes that are references to other DIEs are processed
7581 by recursion, using the MARK field to prevent infinite recursion.
7582 If the DIE is nested inside a namespace or another type, we also
7583 need to include that context in the signature. The lower 64 bits
7584 of the resulting MD5 checksum comprise the signature. */
7585
7586 static void
7587 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7588 {
7589 int mark;
7590 const char *name;
7591 unsigned char checksum[16];
7592 struct md5_ctx ctx;
7593 dw_die_ref decl;
7594 dw_die_ref parent;
7595
7596 name = get_AT_string (die, DW_AT_name);
7597 decl = get_AT_ref (die, DW_AT_specification);
7598 parent = get_die_parent (die);
7599
7600 /* First, compute a signature for just the type name (and its surrounding
7601 context, if any). This is stored in the type unit DIE for link-time
7602 ODR (one-definition rule) checking. */
7603
7604 if (is_cxx () && name != NULL)
7605 {
7606 md5_init_ctx (&ctx);
7607
7608 /* Checksum the names of surrounding namespaces and structures. */
7609 if (parent != NULL)
7610 checksum_die_context (parent, &ctx);
7611
7612 /* Checksum the current DIE. */
7613 die_odr_checksum (die->die_tag, name, &ctx);
7614 md5_finish_ctx (&ctx, checksum);
7615
7616 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7617 }
7618
7619 /* Next, compute the complete type signature. */
7620
7621 md5_init_ctx (&ctx);
7622 mark = 1;
7623 die->die_mark = mark;
7624
7625 /* Checksum the names of surrounding namespaces and structures. */
7626 if (parent != NULL)
7627 checksum_die_context (parent, &ctx);
7628
7629 /* Checksum the DIE and its children. */
7630 die_checksum_ordered (die, &ctx, &mark);
7631 unmark_all_dies (die);
7632 md5_finish_ctx (&ctx, checksum);
7633
7634 /* Store the signature in the type node and link the type DIE and the
7635 type node together. */
7636 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7637 DWARF_TYPE_SIGNATURE_SIZE);
7638 die->comdat_type_p = true;
7639 die->die_id.die_type_node = type_node;
7640 type_node->type_die = die;
7641
7642 /* If the DIE is a specification, link its declaration to the type node
7643 as well. */
7644 if (decl != NULL)
7645 {
7646 decl->comdat_type_p = true;
7647 decl->die_id.die_type_node = type_node;
7648 }
7649 }
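
/* The signature stored in TYPE_NODE is later emitted in the type unit
   header and used for DW_FORM_ref_sig8 references to this type.  */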
7650
7651 /* Do the location expressions look the same? */
7652 static inline int
7653 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7654 {
7655 return loc1->dw_loc_opc == loc2->dw_loc_opc
7656 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7657 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7658 }
7659
7660 /* Do the values look the same? */
7661 static int
7662 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7663 {
7664 dw_loc_descr_ref loc1, loc2;
7665 rtx r1, r2;
7666
7667 if (v1->val_class != v2->val_class)
7668 return 0;
7669
7670 switch (v1->val_class)
7671 {
7672 case dw_val_class_const:
7673 case dw_val_class_const_implicit:
7674 return v1->v.val_int == v2->v.val_int;
7675 case dw_val_class_unsigned_const:
7676 case dw_val_class_unsigned_const_implicit:
7677 return v1->v.val_unsigned == v2->v.val_unsigned;
7678 case dw_val_class_const_double:
7679 return v1->v.val_double.high == v2->v.val_double.high
7680 && v1->v.val_double.low == v2->v.val_double.low;
7681 case dw_val_class_wide_int:
7682 return *v1->v.val_wide == *v2->v.val_wide;
7683 case dw_val_class_vec:
7684 if (v1->v.val_vec.length != v2->v.val_vec.length
7685 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7686 return 0;
7687 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7688 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7689 return 0;
7690 return 1;
7691 case dw_val_class_flag:
7692 return v1->v.val_flag == v2->v.val_flag;
7693 case dw_val_class_str:
7694 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7695
7696 case dw_val_class_addr:
7697 r1 = v1->v.val_addr;
7698 r2 = v2->v.val_addr;
7699 if (GET_CODE (r1) != GET_CODE (r2))
7700 return 0;
7701 return rtx_equal_p (r1, r2);
7702
7703 case dw_val_class_offset:
7704 return v1->v.val_offset == v2->v.val_offset;
7705
7706 case dw_val_class_loc:
7707 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7708 loc1 && loc2;
7709 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7710 if (!same_loc_p (loc1, loc2, mark))
7711 return 0;
7712 return !loc1 && !loc2;
7713
7714 case dw_val_class_die_ref:
7715 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7716
7717 case dw_val_class_symview:
7718 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7719
7720 case dw_val_class_fde_ref:
7721 case dw_val_class_vms_delta:
7722 case dw_val_class_lbl_id:
7723 case dw_val_class_lineptr:
7724 case dw_val_class_macptr:
7725 case dw_val_class_loclistsptr:
7726 case dw_val_class_high_pc:
7727 return 1;
7728
7729 case dw_val_class_file:
7730 case dw_val_class_file_implicit:
7731 return v1->v.val_file == v2->v.val_file;
7732
7733 case dw_val_class_data8:
7734 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7735
7736 default:
7737 return 1;
7738 }
7739 }
7740
7741 /* Do the attributes look the same? */
7742
7743 static int
7744 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7745 {
7746 if (at1->dw_attr != at2->dw_attr)
7747 return 0;
7748
7749 /* We don't care that this was compiled with a different compiler
7750 snapshot; if the output is the same, that's what matters. */
7751 if (at1->dw_attr == DW_AT_producer)
7752 return 1;
7753
7754 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7755 }
7756
7757 /* Do the dies look the same? */
7758
7759 static int
7760 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7761 {
7762 dw_die_ref c1, c2;
7763 dw_attr_node *a1;
7764 unsigned ix;
7765
7766 /* To avoid infinite recursion. */
7767 if (die1->die_mark)
7768 return die1->die_mark == die2->die_mark;
7769 die1->die_mark = die2->die_mark = ++(*mark);
7770
7771 if (die1->die_tag != die2->die_tag)
7772 return 0;
7773
7774 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7775 return 0;
7776
7777 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7778 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7779 return 0;
7780
7781 c1 = die1->die_child;
7782 c2 = die2->die_child;
7783 if (! c1)
7784 {
7785 if (c2)
7786 return 0;
7787 }
7788 else
7789 for (;;)
7790 {
7791 if (!same_die_p (c1, c2, mark))
7792 return 0;
7793 c1 = c1->die_sib;
7794 c2 = c2->die_sib;
7795 if (c1 == die1->die_child)
7796 {
7797 if (c2 == die2->die_child)
7798 break;
7799 else
7800 return 0;
7801 }
7802 }
7803
7804 return 1;
7805 }
7806
7807 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7808 children, and set die_symbol. */
7809
7810 static void
7811 compute_comp_unit_symbol (dw_die_ref unit_die)
7812 {
7813 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7814 const char *base = die_name ? lbasename (die_name) : "anonymous";
7815 char *name = XALLOCAVEC (char, strlen (base) + 64);
7816 char *p;
7817 int i, mark;
7818 unsigned char checksum[16];
7819 struct md5_ctx ctx;
7820
7821 /* Compute the checksum of the DIE, then append part of it as hex digits to
7822 the name (the base filename) of the unit. */
7823
7824 md5_init_ctx (&ctx);
7825 mark = 0;
7826 die_checksum (unit_die, &ctx, &mark);
7827 unmark_all_dies (unit_die);
7828 md5_finish_ctx (&ctx, checksum);
7829
7830 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7831 not start with a letter but with anything valid for filenames and
7832 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7833 character is not a letter. */
7834 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7835 clean_symbol_name (name);
7836
7837 p = name + strlen (name);
7838 for (i = 0; i < 4; i++)
7839 {
7840 sprintf (p, "%.2x", checksum[i]);
7841 p += 2;
7842 }
7843
7844 unit_die->die_id.die_symbol = xstrdup (name);
7845 }
7846
7847 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7848
7849 static int
7850 is_type_die (dw_die_ref die)
7851 {
7852 switch (die->die_tag)
7853 {
7854 case DW_TAG_array_type:
7855 case DW_TAG_class_type:
7856 case DW_TAG_interface_type:
7857 case DW_TAG_enumeration_type:
7858 case DW_TAG_pointer_type:
7859 case DW_TAG_reference_type:
7860 case DW_TAG_rvalue_reference_type:
7861 case DW_TAG_string_type:
7862 case DW_TAG_structure_type:
7863 case DW_TAG_subroutine_type:
7864 case DW_TAG_union_type:
7865 case DW_TAG_ptr_to_member_type:
7866 case DW_TAG_set_type:
7867 case DW_TAG_subrange_type:
7868 case DW_TAG_base_type:
7869 case DW_TAG_const_type:
7870 case DW_TAG_file_type:
7871 case DW_TAG_packed_type:
7872 case DW_TAG_volatile_type:
7873 case DW_TAG_typedef:
7874 return 1;
7875 default:
7876 return 0;
7877 }
7878 }
7879
7880 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7881 Basically, we want to choose the bits that are likely to be shared between
7882 compilations (types) and leave out the bits that are specific to individual
7883 compilations (functions). */
7884
7885 static int
7886 is_comdat_die (dw_die_ref c)
7887 {
7888 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7889 we do for stabs. The advantage is a greater likelihood of sharing between
7890 objects that don't include headers in the same order (and therefore would
7891 put the base types in a different comdat). jason 8/28/00 */
7892
7893 if (c->die_tag == DW_TAG_base_type)
7894 return 0;
7895
7896 if (c->die_tag == DW_TAG_pointer_type
7897 || c->die_tag == DW_TAG_reference_type
7898 || c->die_tag == DW_TAG_rvalue_reference_type
7899 || c->die_tag == DW_TAG_const_type
7900 || c->die_tag == DW_TAG_volatile_type)
7901 {
7902 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7903
7904 return t ? is_comdat_die (t) : 0;
7905 }
7906
7907 return is_type_die (c);
7908 }
7909
7910 /* Returns true iff C is a compile-unit DIE. */
7911
7912 static inline bool
7913 is_cu_die (dw_die_ref c)
7914 {
7915 return c && (c->die_tag == DW_TAG_compile_unit
7916 || c->die_tag == DW_TAG_skeleton_unit);
7917 }
7918
7919 /* Returns true iff C is a unit DIE of some sort. */
7920
7921 static inline bool
7922 is_unit_die (dw_die_ref c)
7923 {
7924 return c && (c->die_tag == DW_TAG_compile_unit
7925 || c->die_tag == DW_TAG_partial_unit
7926 || c->die_tag == DW_TAG_type_unit
7927 || c->die_tag == DW_TAG_skeleton_unit);
7928 }
7929
7930 /* Returns true iff C is a namespace DIE. */
7931
7932 static inline bool
7933 is_namespace_die (dw_die_ref c)
7934 {
7935 return c && c->die_tag == DW_TAG_namespace;
7936 }
7937
7938 /* Returns true iff C is a class or structure DIE. */
7939
7940 static inline bool
7941 is_class_die (dw_die_ref c)
7942 {
7943 return c && (c->die_tag == DW_TAG_class_type
7944 || c->die_tag == DW_TAG_structure_type);
7945 }
7946
7947 /* Return non-zero if this DIE is a template parameter. */
7948
7949 static inline bool
7950 is_template_parameter (dw_die_ref die)
7951 {
7952 switch (die->die_tag)
7953 {
7954 case DW_TAG_template_type_param:
7955 case DW_TAG_template_value_param:
7956 case DW_TAG_GNU_template_template_param:
7957 case DW_TAG_GNU_template_parameter_pack:
7958 return true;
7959 default:
7960 return false;
7961 }
7962 }
7963
7964 /* Return non-zero if this DIE represents a template instantiation. */
7965
7966 static inline bool
7967 is_template_instantiation (dw_die_ref die)
7968 {
7969 dw_die_ref c;
7970
7971 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7972 return false;
7973 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7974 return false;
7975 }
7976
7977 static char *
7978 gen_internal_sym (const char *prefix)
7979 {
7980 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7981
7982 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7983 return xstrdup (buf);
7984 }
7985
7986 /* Return non-zero if this DIE is a declaration. */
7987
7988 static int
7989 is_declaration_die (dw_die_ref die)
7990 {
7991 dw_attr_node *a;
7992 unsigned ix;
7993
7994 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7995 if (a->dw_attr == DW_AT_declaration)
7996 return 1;
7997
7998 return 0;
7999 }
8000
8001 /* Return non-zero if this DIE is nested inside a subprogram. */
8002
8003 static int
8004 is_nested_in_subprogram (dw_die_ref die)
8005 {
8006 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
8007
8008 if (decl == NULL)
8009 decl = die;
8010 return local_scope_p (decl);
8011 }
8012
8013 /* Return non-zero if this DIE contains a defining declaration of a
8014 subprogram. */
8015
8016 static int
8017 contains_subprogram_definition (dw_die_ref die)
8018 {
8019 dw_die_ref c;
8020
8021 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8022 return 1;
8023 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8024 return 0;
8025 }
8026
8027 /* Return non-zero if this is a type DIE that should be moved to a
8028 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8029 unit type. */
8030
8031 static int
8032 should_move_die_to_comdat (dw_die_ref die)
8033 {
8034 switch (die->die_tag)
8035 {
8036 case DW_TAG_class_type:
8037 case DW_TAG_structure_type:
8038 case DW_TAG_enumeration_type:
8039 case DW_TAG_union_type:
8040 /* Don't move declarations, inlined instances, types nested in a
8041 subprogram, or types that contain subprogram definitions. */
8042 if (is_declaration_die (die)
8043 || get_AT (die, DW_AT_abstract_origin)
8044 || is_nested_in_subprogram (die)
8045 || contains_subprogram_definition (die))
8046 return 0;
8047 return 1;
8048 case DW_TAG_array_type:
8049 case DW_TAG_interface_type:
8050 case DW_TAG_pointer_type:
8051 case DW_TAG_reference_type:
8052 case DW_TAG_rvalue_reference_type:
8053 case DW_TAG_string_type:
8054 case DW_TAG_subroutine_type:
8055 case DW_TAG_ptr_to_member_type:
8056 case DW_TAG_set_type:
8057 case DW_TAG_subrange_type:
8058 case DW_TAG_base_type:
8059 case DW_TAG_const_type:
8060 case DW_TAG_file_type:
8061 case DW_TAG_packed_type:
8062 case DW_TAG_volatile_type:
8063 case DW_TAG_typedef:
8064 default:
8065 return 0;
8066 }
8067 }
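
/* Thus, for example, a complete struct, class, enum, or union defined at
   namespace scope and containing no function definitions is moved to its
   own type unit, while declarations, function-local types, and pointer or
   other modified types stay in the main CU.  */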
8068
8069 /* Make a clone of DIE. */
8070
8071 static dw_die_ref
8072 clone_die (dw_die_ref die)
8073 {
8074 dw_die_ref clone = new_die_raw (die->die_tag);
8075 dw_attr_node *a;
8076 unsigned ix;
8077
8078 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8079 add_dwarf_attr (clone, a);
8080
8081 return clone;
8082 }
8083
8084 /* Make a clone of the tree rooted at DIE. */
8085
8086 static dw_die_ref
8087 clone_tree (dw_die_ref die)
8088 {
8089 dw_die_ref c;
8090 dw_die_ref clone = clone_die (die);
8091
8092 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8093
8094 return clone;
8095 }
8096
8097 /* Make a clone of DIE as a declaration. */
8098
8099 static dw_die_ref
8100 clone_as_declaration (dw_die_ref die)
8101 {
8102 dw_die_ref clone;
8103 dw_die_ref decl;
8104 dw_attr_node *a;
8105 unsigned ix;
8106
8107 /* If the DIE is already a declaration, just clone it. */
8108 if (is_declaration_die (die))
8109 return clone_die (die);
8110
8111 /* If the DIE is a specification, just clone its declaration DIE. */
8112 decl = get_AT_ref (die, DW_AT_specification);
8113 if (decl != NULL)
8114 {
8115 clone = clone_die (decl);
8116 if (die->comdat_type_p)
8117 add_AT_die_ref (clone, DW_AT_signature, die);
8118 return clone;
8119 }
8120
8121 clone = new_die_raw (die->die_tag);
8122
8123 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8124 {
8125 /* We don't want to copy over all attributes.
8126 For example we don't want DW_AT_byte_size because otherwise we will no
8127 longer have a declaration and GDB will treat it as a definition. */
8128
8129 switch (a->dw_attr)
8130 {
8131 case DW_AT_abstract_origin:
8132 case DW_AT_artificial:
8133 case DW_AT_containing_type:
8134 case DW_AT_external:
8135 case DW_AT_name:
8136 case DW_AT_type:
8137 case DW_AT_virtuality:
8138 case DW_AT_linkage_name:
8139 case DW_AT_MIPS_linkage_name:
8140 add_dwarf_attr (clone, a);
8141 break;
8142 case DW_AT_byte_size:
8143 case DW_AT_alignment:
8144 default:
8145 break;
8146 }
8147 }
8148
8149 if (die->comdat_type_p)
8150 add_AT_die_ref (clone, DW_AT_signature, die);
8151
8152 add_AT_flag (clone, DW_AT_declaration, 1);
8153 return clone;
8154 }
8155
8156
8157 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8158
8159 struct decl_table_entry
8160 {
8161 dw_die_ref orig;
8162 dw_die_ref copy;
8163 };
8164
8165 /* Helpers to manipulate hash table of copied declarations. */
8166
8167 /* Hashtable helpers. */
8168
8169 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8170 {
8171 typedef die_struct *compare_type;
8172 static inline hashval_t hash (const decl_table_entry *);
8173 static inline bool equal (const decl_table_entry *, const die_struct *);
8174 };
8175
8176 inline hashval_t
8177 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8178 {
8179 return htab_hash_pointer (entry->orig);
8180 }
8181
8182 inline bool
8183 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8184 const die_struct *entry2)
8185 {
8186 return entry1->orig == entry2;
8187 }
8188
8189 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8190
8191 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8192 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8193 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8194 to check if the ancestor has already been copied into UNIT. */
8195
8196 static dw_die_ref
8197 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8198 decl_hash_type *decl_table)
8199 {
8200 dw_die_ref parent = die->die_parent;
8201 dw_die_ref new_parent = unit;
8202 dw_die_ref copy;
8203 decl_table_entry **slot = NULL;
8204 struct decl_table_entry *entry = NULL;
8205
8206 if (decl_table)
8207 {
8208 /* Check if the entry has already been copied to UNIT. */
8209 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8210 INSERT);
8211 if (*slot != HTAB_EMPTY_ENTRY)
8212 {
8213 entry = *slot;
8214 return entry->copy;
8215 }
8216
8217 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8218 entry = XCNEW (struct decl_table_entry);
8219 entry->orig = die;
8220 entry->copy = NULL;
8221 *slot = entry;
8222 }
8223
8224 if (parent != NULL)
8225 {
8226 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8227 if (spec != NULL)
8228 parent = spec;
8229 if (!is_unit_die (parent))
8230 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8231 }
8232
8233 copy = clone_as_declaration (die);
8234 add_child_die (new_parent, copy);
8235
8236 if (decl_table)
8237 {
8238 /* Record the pointer to the copy. */
8239 entry->copy = copy;
8240 }
8241
8242 return copy;
8243 }
8244 /* Copy the declaration context to the new type unit DIE. This includes
8245 any surrounding namespace or type declarations. If the DIE has an
8246 AT_specification attribute, it also includes attributes and children
8247 attached to the specification, and returns a pointer to the original
8248 parent of the declaration DIE. Returns NULL otherwise. */
8249
8250 static dw_die_ref
8251 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8252 {
8253 dw_die_ref decl;
8254 dw_die_ref new_decl;
8255 dw_die_ref orig_parent = NULL;
8256
8257 decl = get_AT_ref (die, DW_AT_specification);
8258 if (decl == NULL)
8259 decl = die;
8260 else
8261 {
8262 unsigned ix;
8263 dw_die_ref c;
8264 dw_attr_node *a;
8265
8266 /* The original DIE will be changed to a declaration, and must
8267 be moved to be a child of the original declaration DIE. */
8268 orig_parent = decl->die_parent;
8269
8270 /* Copy the type node pointer from the new DIE to the original
8271 declaration DIE so we can forward references later. */
8272 decl->comdat_type_p = true;
8273 decl->die_id.die_type_node = die->die_id.die_type_node;
8274
8275 remove_AT (die, DW_AT_specification);
8276
8277 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8278 {
8279 if (a->dw_attr != DW_AT_name
8280 && a->dw_attr != DW_AT_declaration
8281 && a->dw_attr != DW_AT_external)
8282 add_dwarf_attr (die, a);
8283 }
8284
8285 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8286 }
8287
8288 if (decl->die_parent != NULL
8289 && !is_unit_die (decl->die_parent))
8290 {
8291 new_decl = copy_ancestor_tree (unit, decl, NULL);
8292 if (new_decl != NULL)
8293 {
8294 remove_AT (new_decl, DW_AT_signature);
8295 add_AT_specification (die, new_decl);
8296 }
8297 }
8298
8299 return orig_parent;
8300 }
8301
8302 /* Generate the skeleton ancestor tree for the given NODE, then clone
8303 the DIE and add the clone into the tree. */
8304
8305 static void
8306 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8307 {
8308 if (node->new_die != NULL)
8309 return;
8310
8311 node->new_die = clone_as_declaration (node->old_die);
8312
8313 if (node->parent != NULL)
8314 {
8315 generate_skeleton_ancestor_tree (node->parent);
8316 add_child_die (node->parent->new_die, node->new_die);
8317 }
8318 }
8319
8320 /* Generate a skeleton tree of DIEs containing any declarations that are
8321 found in the original tree. We traverse the tree looking for declaration
8322 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8323
8324 static void
8325 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8326 {
8327 skeleton_chain_node node;
8328 dw_die_ref c;
8329 dw_die_ref first;
8330 dw_die_ref prev = NULL;
8331 dw_die_ref next = NULL;
8332
8333 node.parent = parent;
8334
8335 first = c = parent->old_die->die_child;
8336 if (c)
8337 next = c->die_sib;
8338 if (c) do {
8339 if (prev == NULL || prev->die_sib == c)
8340 prev = c;
8341 c = next;
8342 next = (c == first ? NULL : c->die_sib);
8343 node.old_die = c;
8344 node.new_die = NULL;
8345 if (is_declaration_die (c))
8346 {
8347 if (is_template_instantiation (c))
8348 {
8349 /* Instantiated templates do not need to be cloned into the
8350 type unit. Just move the DIE and its children back to
8351 the skeleton tree (in the main CU). */
8352 remove_child_with_prev (c, prev);
8353 add_child_die (parent->new_die, c);
8354 c = prev;
8355 }
8356 else if (c->comdat_type_p)
8357 {
8358 /* This is the skeleton of a type broken out by an earlier
8359 break_out_comdat_types call. Clone the existing DIE, but keep the children
8360 under the original (which is in the main CU). */
8361 dw_die_ref clone = clone_die (c);
8362
8363 replace_child (c, clone, prev);
8364 generate_skeleton_ancestor_tree (parent);
8365 add_child_die (parent->new_die, c);
8366 c = clone;
8367 continue;
8368 }
8369 else
8370 {
8371 /* Clone the existing DIE, move the original to the skeleton
8372 tree (which is in the main CU), and put the clone, with
8373 all the original's children, where the original came from
8374 (which is about to be moved to the type unit). */
8375 dw_die_ref clone = clone_die (c);
8376 move_all_children (c, clone);
8377
8378 /* If the original has a DW_AT_object_pointer attribute,
8379 it would now point to a child DIE just moved to the
8380 cloned tree, so we need to remove that attribute from
8381 the original. */
8382 remove_AT (c, DW_AT_object_pointer);
8383
8384 replace_child (c, clone, prev);
8385 generate_skeleton_ancestor_tree (parent);
8386 add_child_die (parent->new_die, c);
8387 node.old_die = clone;
8388 node.new_die = c;
8389 c = clone;
8390 }
8391 }
8392 generate_skeleton_bottom_up (&node);
8393 } while (next != NULL);
8394 }
8395
8396 /* Wrapper function for generate_skeleton_bottom_up. */
8397
8398 static dw_die_ref
8399 generate_skeleton (dw_die_ref die)
8400 {
8401 skeleton_chain_node node;
8402
8403 node.old_die = die;
8404 node.new_die = NULL;
8405 node.parent = NULL;
8406
8407 /* If this type definition is nested inside another type,
8408 and is not an instantiation of a template, always leave
8409 at least a declaration in its place. */
8410 if (die->die_parent != NULL
8411 && is_type_die (die->die_parent)
8412 && !is_template_instantiation (die))
8413 node.new_die = clone_as_declaration (die);
8414
8415 generate_skeleton_bottom_up (&node);
8416 return node.new_die;
8417 }
8418
8419 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8420 declaration. The original DIE is moved to a new compile unit so that
8421 existing references to it follow it to the new location. If any of the
8422 original DIE's descendants is a declaration, we need to replace the
8423 original DIE with a skeleton tree and move the declarations back into the
8424 skeleton tree. */
8425
8426 static dw_die_ref
8427 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8428 dw_die_ref prev)
8429 {
8430 dw_die_ref skeleton, orig_parent;
8431
8432 /* Copy the declaration context to the type unit DIE. If the returned
8433 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8434 that DIE. */
8435 orig_parent = copy_declaration_context (unit, child);
8436
8437 skeleton = generate_skeleton (child);
8438 if (skeleton == NULL)
8439 remove_child_with_prev (child, prev);
8440 else
8441 {
8442 skeleton->comdat_type_p = true;
8443 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8444
8445 /* If the original DIE was a specification, we need to put
8446 the skeleton under the parent DIE of the declaration.
8447 This leaves the original declaration in the tree, but
8448 it will be pruned later since there are no longer any
8449 references to it. */
8450 if (orig_parent != NULL)
8451 {
8452 remove_child_with_prev (child, prev);
8453 add_child_die (orig_parent, skeleton);
8454 }
8455 else
8456 replace_child (child, skeleton, prev);
8457 }
8458
8459 return skeleton;
8460 }
8461
8462 static void
8463 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8464 comdat_type_node *type_node,
8465 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8466
8467 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8468 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8469 DWARF procedure references in the DW_AT_location attribute. */
8470
8471 static dw_die_ref
8472 copy_dwarf_procedure (dw_die_ref die,
8473 comdat_type_node *type_node,
8474 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8475 {
8476 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8477
8478 /* DWARF procedures are not supposed to have children... */
8479 gcc_assert (die->die_child == NULL);
8480
8481 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8482 gcc_assert (vec_safe_length (die->die_attr) == 1
8483 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8484
8485 /* Do not copy DWARF procedures more than once. */
8486 bool existed;
8487 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8488 if (existed)
8489 return die_copy;
8490
8491 die_copy = clone_die (die);
8492 add_child_die (type_node->root_die, die_copy);
8493 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8494 return die_copy;
8495 }
8496
8497 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8498 procedures in DIE's attributes. */
8499
8500 static void
8501 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8502 comdat_type_node *type_node,
8503 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8504 {
8505 dw_attr_node *a;
8506 unsigned i;
8507
8508 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8509 {
8510 dw_loc_descr_ref loc;
8511
8512 if (a->dw_attr_val.val_class != dw_val_class_loc)
8513 continue;
8514
8515 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8516 {
8517 switch (loc->dw_loc_opc)
8518 {
8519 case DW_OP_call2:
8520 case DW_OP_call4:
8521 case DW_OP_call_ref:
8522 gcc_assert (loc->dw_loc_oprnd1.val_class
8523 == dw_val_class_die_ref);
8524 loc->dw_loc_oprnd1.v.val_die_ref.die
8525 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8526 type_node,
8527 copied_dwarf_procs);
8528
8529 default:
8530 break;
8531 }
8532 }
8533 }
8534 }
8535
8536 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
8537 rewrite references to point to the copies.
8538
8539 References are looked for in the location description attributes of DIE
8540 and, recursively, of all its children. COPIED_DWARF_PROCS is a mapping from
8541 old DWARF procedures to their copies; it is used to avoid copying the same
8542 DWARF procedure twice under TYPE_NODE. */
8543
8544 static void
8545 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8546 comdat_type_node *type_node,
8547 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8548 {
8549 dw_die_ref c;
8550
8551 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8552 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8553 type_node,
8554 copied_dwarf_procs));
8555 }
8556
8557 /* Traverse the DIE and set up additional .debug_types or .debug_info
8558 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8559 section. */
8560
8561 static void
8562 break_out_comdat_types (dw_die_ref die)
8563 {
8564 dw_die_ref c;
8565 dw_die_ref first;
8566 dw_die_ref prev = NULL;
8567 dw_die_ref next = NULL;
8568 dw_die_ref unit = NULL;
8569
8570 first = c = die->die_child;
8571 if (c)
8572 next = c->die_sib;
8573 if (c) do {
8574 if (prev == NULL || prev->die_sib == c)
8575 prev = c;
8576 c = next;
8577 next = (c == first ? NULL : c->die_sib);
8578 if (should_move_die_to_comdat (c))
8579 {
8580 dw_die_ref replacement;
8581 comdat_type_node *type_node;
8582
8583 /* Break out nested types into their own type units. */
8584 break_out_comdat_types (c);
8585
8586 /* Create a new type unit DIE as the root for the new tree, and
8587 add it to the list of comdat types. */
8588 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8589 add_AT_unsigned (unit, DW_AT_language,
8590 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8591 type_node = ggc_cleared_alloc<comdat_type_node> ();
8592 type_node->root_die = unit;
8593 type_node->next = comdat_type_list;
8594 comdat_type_list = type_node;
8595
8596 /* Generate the type signature. */
8597 generate_type_signature (c, type_node);
8598
8599 /* Copy the declaration context, attributes, and children of the
8600 declaration into the new type unit DIE, then remove this DIE
8601 from the main CU (or replace it with a skeleton if necessary). */
8602 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8603 type_node->skeleton_die = replacement;
8604
8605 /* Add the DIE to the new compunit. */
8606 add_child_die (unit, c);
8607
8608 /* Types can reference DWARF procedures for type size or data location
8609 expressions. Calls in DWARF expressions cannot target procedures
8610 that are not in the same section. So we must copy DWARF procedures
8611 along with this type and then rewrite references to them. */
8612 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8613 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8614
8615 if (replacement != NULL)
8616 c = replacement;
8617 }
8618 else if (c->die_tag == DW_TAG_namespace
8619 || c->die_tag == DW_TAG_class_type
8620 || c->die_tag == DW_TAG_structure_type
8621 || c->die_tag == DW_TAG_union_type)
8622 {
8623 /* Look for nested types that can be broken out. */
8624 break_out_comdat_types (c);
8625 }
8626 } while (next != NULL);
8627 }
8628
8629 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8630 Enter all the cloned children into the hash table decl_table. */
8631
8632 static dw_die_ref
8633 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8634 {
8635 dw_die_ref c;
8636 dw_die_ref clone;
8637 struct decl_table_entry *entry;
8638 decl_table_entry **slot;
8639
8640 if (die->die_tag == DW_TAG_subprogram)
8641 clone = clone_as_declaration (die);
8642 else
8643 clone = clone_die (die);
8644
8645 slot = decl_table->find_slot_with_hash (die,
8646 htab_hash_pointer (die), INSERT);
8647
8648 /* Assert that DIE isn't in the hash table yet. If it were already there,
8649 its ancestors would necessarily be there as well, and clone_tree_partial
8650 wouldn't have been called. */
8651 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8652
8653 entry = XCNEW (struct decl_table_entry);
8654 entry->orig = die;
8655 entry->copy = clone;
8656 *slot = entry;
8657
8658 if (die->die_tag != DW_TAG_subprogram)
8659 FOR_EACH_CHILD (die, c,
8660 add_child_die (clone, clone_tree_partial (c, decl_table)));
8661
8662 return clone;
8663 }
8664
8665 /* Walk the DIE and its children, looking for references to incomplete
8666 or trivial types that are unmarked (i.e., that are not in the current
8667 type_unit). */
8668
8669 static void
8670 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8671 {
8672 dw_die_ref c;
8673 dw_attr_node *a;
8674 unsigned ix;
8675
8676 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8677 {
8678 if (AT_class (a) == dw_val_class_die_ref)
8679 {
8680 dw_die_ref targ = AT_ref (a);
8681 decl_table_entry **slot;
8682 struct decl_table_entry *entry;
8683
8684 if (targ->die_mark != 0 || targ->comdat_type_p)
8685 continue;
8686
8687 slot = decl_table->find_slot_with_hash (targ,
8688 htab_hash_pointer (targ),
8689 INSERT);
8690
8691 if (*slot != HTAB_EMPTY_ENTRY)
8692 {
8693 /* TARG has already been copied, so we just need to
8694 modify the reference to point to the copy. */
8695 entry = *slot;
8696 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8697 }
8698 else
8699 {
8700 dw_die_ref parent = unit;
8701 dw_die_ref copy = clone_die (targ);
8702
8703 /* Record in DECL_TABLE that TARG has been copied.
8704 Need to do this now, before the recursive call,
8705 because DECL_TABLE may be expanded and SLOT
8706 would no longer be a valid pointer. */
8707 entry = XCNEW (struct decl_table_entry);
8708 entry->orig = targ;
8709 entry->copy = copy;
8710 *slot = entry;
8711
8712 /* If TARG is not a declaration DIE, we need to copy its
8713 children. */
8714 if (!is_declaration_die (targ))
8715 {
8716 FOR_EACH_CHILD (
8717 targ, c,
8718 add_child_die (copy,
8719 clone_tree_partial (c, decl_table)));
8720 }
8721
8722 /* Make sure the cloned tree is marked as part of the
8723 type unit. */
8724 mark_dies (copy);
8725
8726 /* If TARG has surrounding context, copy its ancestor tree
8727 into the new type unit. */
8728 if (targ->die_parent != NULL
8729 && !is_unit_die (targ->die_parent))
8730 parent = copy_ancestor_tree (unit, targ->die_parent,
8731 decl_table);
8732
8733 add_child_die (parent, copy);
8734 a->dw_attr_val.v.val_die_ref.die = copy;
8735
8736 /* Make sure the newly-copied DIE is walked. If it was
8737 installed in a previously-added context, it won't
8738 get visited otherwise. */
8739 if (parent != unit)
8740 {
8741 /* Find the highest point of the newly-added tree,
8742 mark each node along the way, and walk from there. */
8743 parent->die_mark = 1;
8744 while (parent->die_parent
8745 && parent->die_parent->die_mark == 0)
8746 {
8747 parent = parent->die_parent;
8748 parent->die_mark = 1;
8749 }
8750 copy_decls_walk (unit, parent, decl_table);
8751 }
8752 }
8753 }
8754 }
8755
8756 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8757 }
8758
8759 /* Copy declarations for "unworthy" types into the new comdat section.
8760 Incomplete types, modified types, and certain other types aren't broken
8761 out into comdat sections of their own, so they don't have a signature,
8762 and we need to copy the declaration into the same section so that we
8763 don't have an external reference. */
8764
8765 static void
8766 copy_decls_for_unworthy_types (dw_die_ref unit)
8767 {
8768 mark_dies (unit);
8769 decl_hash_type decl_table (10);
8770 copy_decls_walk (unit, unit, &decl_table);
8771 unmark_dies (unit);
8772 }
8773
8774 /* Traverse the DIE and add a sibling attribute if it may have the
8775 effect of speeding up access to siblings. To save some space,
8776 avoid generating sibling attributes for DIE's without children. */
8777
8778 static void
8779 add_sibling_attributes (dw_die_ref die)
8780 {
8781 dw_die_ref c;
8782
8783 if (! die->die_child)
8784 return;
8785
8786 if (die->die_parent && die != die->die_parent->die_child)
8787 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8788
8789 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8790 }
8791
8792 /* Output all location lists for the DIE and its children. */
8793
8794 static void
8795 output_location_lists (dw_die_ref die)
8796 {
8797 dw_die_ref c;
8798 dw_attr_node *a;
8799 unsigned ix;
8800
8801 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8802 if (AT_class (a) == dw_val_class_loc_list)
8803 output_loc_list (AT_loc_list (a));
8804
8805 FOR_EACH_CHILD (die, c, output_location_lists (c));
8806 }
8807
8808 /* During assign_location_list_indexes and output_loclists_offsets this is
8809 the current index; afterwards it holds the number of assigned indexes
8810 (i.e. how large the .debug_loclists* offset table should be). */
8811 static unsigned int loc_list_idx;
8812
8813 /* Output all location list offsets for the DIE and its children. */
8814
8815 static void
8816 output_loclists_offsets (dw_die_ref die)
8817 {
8818 dw_die_ref c;
8819 dw_attr_node *a;
8820 unsigned ix;
8821
8822 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8823 if (AT_class (a) == dw_val_class_loc_list)
8824 {
8825 dw_loc_list_ref l = AT_loc_list (a);
8826 if (l->offset_emitted)
8827 continue;
8828 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8829 loc_section_label, NULL);
8830 gcc_assert (l->hash == loc_list_idx);
8831 loc_list_idx++;
8832 l->offset_emitted = true;
8833 }
8834
8835 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8836 }
8837
8838 /* Recursively set indexes of location lists. */
8839
8840 static void
8841 assign_location_list_indexes (dw_die_ref die)
8842 {
8843 dw_die_ref c;
8844 dw_attr_node *a;
8845 unsigned ix;
8846
8847 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8848 if (AT_class (a) == dw_val_class_loc_list)
8849 {
8850 dw_loc_list_ref list = AT_loc_list (a);
8851 if (!list->num_assigned)
8852 {
8853 list->num_assigned = true;
8854 list->hash = loc_list_idx++;
8855 }
8856 }
8857
8858 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8859 }
8860
8861 /* We want to limit the number of external references, because they are
8862 larger than local references: a relocation takes multiple words, and
8863 even a sig8 reference is always eight bytes, whereas a local reference
8864 can be as small as one byte (though GCC usually uses DW_FORM_ref4, four bytes).
8865 So if we encounter multiple external references to the same type DIE, we
8866 make a local typedef stub for it and redirect all references there.
8867
8868 This is the element of the hash table for keeping track of these
8869 references. */
8870
8871 struct external_ref
8872 {
8873 dw_die_ref type;
8874 dw_die_ref stub;
8875 unsigned n_refs;
8876 };
8877
8878 /* Hashtable helpers. */
8879
8880 struct external_ref_hasher : free_ptr_hash <external_ref>
8881 {
8882 static inline hashval_t hash (const external_ref *);
8883 static inline bool equal (const external_ref *, const external_ref *);
8884 };
8885
8886 inline hashval_t
8887 external_ref_hasher::hash (const external_ref *r)
8888 {
8889 dw_die_ref die = r->type;
8890 hashval_t h = 0;
8891
8892 /* We can't use the address of the DIE for hashing, because
8893 that will make the order of the stub DIEs non-deterministic. */
8894 if (! die->comdat_type_p)
8895 /* We have a symbol; use it to compute a hash. */
8896 h = htab_hash_string (die->die_id.die_symbol);
8897 else
8898 {
8899 /* We have a type signature; use a subset of the bits as the hash.
8900 The 8-byte signature is at least as large as hashval_t. */
8901 comdat_type_node *type_node = die->die_id.die_type_node;
8902 memcpy (&h, type_node->signature, sizeof (h));
8903 }
8904 return h;
8905 }
8906
8907 inline bool
8908 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8909 {
8910 return r1->type == r2->type;
8911 }
8912
8913 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8914
8915 /* Return a pointer to the external_ref for references to DIE. */
8916
8917 static struct external_ref *
8918 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8919 {
8920 struct external_ref ref, *ref_p;
8921 external_ref **slot;
8922
8923 ref.type = die;
8924 slot = map->find_slot (&ref, INSERT);
8925 if (*slot != HTAB_EMPTY_ENTRY)
8926 return *slot;
8927
8928 ref_p = XCNEW (struct external_ref);
8929 ref_p->type = die;
8930 *slot = ref_p;
8931 return ref_p;
8932 }
8933
8934 /* Subroutine of optimize_external_refs, below.
8935
8936 If we see a type skeleton, record it as our stub. If we see external
8937 references, remember how many we've seen. */
8938
8939 static void
8940 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8941 {
8942 dw_die_ref c;
8943 dw_attr_node *a;
8944 unsigned ix;
8945 struct external_ref *ref_p;
8946
8947 if (is_type_die (die)
8948 && (c = get_AT_ref (die, DW_AT_signature)))
8949 {
8950 /* This is a local skeleton; use it for local references. */
8951 ref_p = lookup_external_ref (map, c);
8952 ref_p->stub = die;
8953 }
8954
8955 /* Scan the DIE references, and remember any that refer to DIEs from
8956 other CUs (i.e. those which are not marked). */
8957 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8958 if (AT_class (a) == dw_val_class_die_ref
8959 && (c = AT_ref (a))->die_mark == 0
8960 && is_type_die (c))
8961 {
8962 ref_p = lookup_external_ref (map, c);
8963 ref_p->n_refs++;
8964 }
8965
8966 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8967 }
8968
8969 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8970 points to an external_ref, DATA is the CU we're processing. If we don't
8971 already have a local stub, and we have multiple refs, build a stub. */
8972
8973 int
8974 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8975 {
8976 struct external_ref *ref_p = *slot;
8977
8978 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8979 {
8980 /* We have multiple references to this type, so build a small stub.
8981 Both of these forms are a bit dodgy from the perspective of the
8982 DWARF standard, since technically they should have names. */
8983 dw_die_ref cu = data;
8984 dw_die_ref type = ref_p->type;
8985 dw_die_ref stub = NULL;
8986
8987 if (type->comdat_type_p)
8988 {
8989 /* If we refer to this type via sig8, use AT_signature. */
8990 stub = new_die (type->die_tag, cu, NULL_TREE);
8991 add_AT_die_ref (stub, DW_AT_signature, type);
8992 }
8993 else
8994 {
8995 /* Otherwise, use a typedef with no name. */
8996 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8997 add_AT_die_ref (stub, DW_AT_type, type);
8998 }
8999
9000 stub->die_mark++;
9001 ref_p->stub = stub;
9002 }
9003 return 1;
9004 }
9005
9006 /* DIE is a unit; look through all the DIE references to see if there are
9007 any external references to types, and if so, create local stubs for
9008 them which will be applied in build_abbrev_table. This is useful because
9009 references to local DIEs are smaller. */
9010
9011 static external_ref_hash_type *
9012 optimize_external_refs (dw_die_ref die)
9013 {
9014 external_ref_hash_type *map = new external_ref_hash_type (10);
9015 optimize_external_refs_1 (die, map);
9016 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9017 return map;
9018 }
9019
9020 /* The following 4 variables are temporaries that are computed only during the
9021 build_abbrev_table call and used and released during the following
9022 optimize_abbrev_table call. */
9023
9024 /* First abbrev_id that can be optimized based on usage. */
9025 static unsigned int abbrev_opt_start;
9026
9027 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9028 abbrev_id smaller than this, because they must be already sized
9029 during build_abbrev_table). */
9030 static unsigned int abbrev_opt_base_type_end;
9031
9032 /* Vector of usage counts during build_abbrev_table. Indexed by
9033 abbrev_id - abbrev_opt_start. */
9034 static vec<unsigned int> abbrev_usage_count;
9035
9036 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9037 static vec<dw_die_ref> sorted_abbrev_dies;
9038
9039 /* The format of each DIE (and its attribute value pairs) is encoded in an
9040 abbreviation table. This routine builds the abbreviation table and assigns
9041 a unique abbreviation id for each abbreviation entry. The children of each
9042 die are visited recursively. */
9043
9044 static void
9045 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9046 {
9047 unsigned int abbrev_id = 0;
9048 dw_die_ref c;
9049 dw_attr_node *a;
9050 unsigned ix;
9051 dw_die_ref abbrev;
9052
9053 /* Scan the DIE references, and replace any that refer to
9054 DIEs from other CUs (i.e. those which are not marked) with
9055 the local stubs we built in optimize_external_refs. */
9056 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9057 if (AT_class (a) == dw_val_class_die_ref
9058 && (c = AT_ref (a))->die_mark == 0)
9059 {
9060 struct external_ref *ref_p;
9061 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9062
9063 ref_p = lookup_external_ref (extern_map, c);
9064 if (ref_p->stub && ref_p->stub != die)
9065 change_AT_die_ref (a, ref_p->stub);
9066 else
9067 /* We aren't changing this reference, so mark it external. */
9068 set_AT_ref_external (a, 1);
9069 }
9070
9071 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9072 {
9073 dw_attr_node *die_a, *abbrev_a;
9074 unsigned ix;
9075 bool ok = true;
9076
9077 if (abbrev_id == 0)
9078 continue;
9079 if (abbrev->die_tag != die->die_tag)
9080 continue;
9081 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9082 continue;
9083
9084 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9085 continue;
9086
9087 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9088 {
9089 abbrev_a = &(*abbrev->die_attr)[ix];
9090 if ((abbrev_a->dw_attr != die_a->dw_attr)
9091 || (value_format (abbrev_a) != value_format (die_a)))
9092 {
9093 ok = false;
9094 break;
9095 }
9096 }
9097 if (ok)
9098 break;
9099 }
9100
9101 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9102 {
9103 vec_safe_push (abbrev_die_table, die);
9104 if (abbrev_opt_start)
9105 abbrev_usage_count.safe_push (0);
9106 }
9107 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9108 {
9109 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9110 sorted_abbrev_dies.safe_push (die);
9111 }
9112
9113 die->die_abbrev = abbrev_id;
9114 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9115 }
9116
9117 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9118 by die_abbrev's usage count, from the most commonly used
9119 abbreviation to the least. */
9120
9121 static int
9122 die_abbrev_cmp (const void *p1, const void *p2)
9123 {
9124 dw_die_ref die1 = *(const dw_die_ref *) p1;
9125 dw_die_ref die2 = *(const dw_die_ref *) p2;
9126
9127 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9128 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9129
9130 if (die1->die_abbrev >= abbrev_opt_base_type_end
9131 && die2->die_abbrev >= abbrev_opt_base_type_end)
9132 {
9133 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9134 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9135 return -1;
9136 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9137 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9138 return 1;
9139 }
9140
9141 /* Stabilize the sort. */
9142 if (die1->die_abbrev < die2->die_abbrev)
9143 return -1;
9144 if (die1->die_abbrev > die2->die_abbrev)
9145 return 1;
9146
9147 return 0;
9148 }
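/* Why sorting by usage count pays off (illustrative): the abbrev code
   is emitted as a uleb128 in front of every DIE, so codes 1-127 take
   one byte while codes 128-16383 take two.  Giving the most heavily
   used abbreviations the smallest codes saves a byte for every DIE
   that would otherwise need a two-byte code, which is also why
   optimize_abbrev_table below only runs when the table has grown past
   127 entries or when DWARF 5 implicit constants are possible.  */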
9149
9150 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9151 of DIEs in between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9152 into dw_val_class_const_implicit or
9153 dw_val_class_unsigned_const_implicit. */
9154
9155 static void
9156 optimize_implicit_const (unsigned int first_id, unsigned int end,
9157 vec<bool> &implicit_consts)
9158 {
9159 /* It never makes sense if there is just one DIE using the abbreviation. */
9160 if (end < first_id + 2)
9161 return;
9162
9163 dw_attr_node *a;
9164 unsigned ix, i;
9165 dw_die_ref die = sorted_abbrev_dies[first_id];
9166 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9167 if (implicit_consts[ix])
9168 {
9169 enum dw_val_class new_class = dw_val_class_none;
9170 switch (AT_class (a))
9171 {
9172 case dw_val_class_unsigned_const:
9173 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9174 continue;
9175
9176 /* The .debug_abbrev section will grow by
9177 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9178 in all the DIEs using that abbreviation. */
9179 if (constant_size (AT_unsigned (a)) * (end - first_id)
9180 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9181 continue;
9182
9183 new_class = dw_val_class_unsigned_const_implicit;
9184 break;
9185
9186 case dw_val_class_const:
9187 new_class = dw_val_class_const_implicit;
9188 break;
9189
9190 case dw_val_class_file:
9191 new_class = dw_val_class_file_implicit;
9192 break;
9193
9194 default:
9195 continue;
9196 }
9197 for (i = first_id; i < end; i++)
9198 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9199 = new_class;
9200 }
9201 }
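/* Worked example of the trade-off above (illustrative): suppose 100
   DIEs share an abbreviation whose DW_AT_byte_size is always 4.  As
   DW_FORM_data1 that constant costs one byte in each DIE, i.e. 100
   bytes of .debug_info; as DW_FORM_implicit_const it costs a single
   sleb128 byte in .debug_abbrev and nothing per DIE.  The guard
   constant_size (x) * (end - first_id) <= size_of_sleb128 (x) skips
   the conversion only when the abbreviation is used so rarely that
   the sleb128 moved into .debug_abbrev would not be smaller than the
   per-DIE bytes it saves.  */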
9202
9203 /* Attempt to optimize the abbreviation table, reassigning ids for
9204 abbreviations numbered abbrev_opt_start and above. */
9205
9206 static void
9207 optimize_abbrev_table (void)
9208 {
9209 if (abbrev_opt_start
9210 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9211 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9212 {
9213 auto_vec<bool, 32> implicit_consts;
9214 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9215
9216 unsigned int abbrev_id = abbrev_opt_start - 1;
9217 unsigned int first_id = ~0U;
9218 unsigned int last_abbrev_id = 0;
9219 unsigned int i;
9220 dw_die_ref die;
9221 if (abbrev_opt_base_type_end > abbrev_opt_start)
9222 abbrev_id = abbrev_opt_base_type_end - 1;
9223 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9224 most commonly used abbreviations come first. */
9225 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9226 {
9227 dw_attr_node *a;
9228 unsigned ix;
9229
9230 /* If calc_base_type_die_sizes has been called, the CU and
9231 base types after it can't be optimized, because we've already
9232 calculated their DIE offsets. We've sorted them first. */
9233 if (die->die_abbrev < abbrev_opt_base_type_end)
9234 continue;
9235 if (die->die_abbrev != last_abbrev_id)
9236 {
9237 last_abbrev_id = die->die_abbrev;
9238 if (dwarf_version >= 5 && first_id != ~0U)
9239 optimize_implicit_const (first_id, i, implicit_consts);
9240 abbrev_id++;
9241 (*abbrev_die_table)[abbrev_id] = die;
9242 if (dwarf_version >= 5)
9243 {
9244 first_id = i;
9245 implicit_consts.truncate (0);
9246
9247 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9248 switch (AT_class (a))
9249 {
9250 case dw_val_class_const:
9251 case dw_val_class_unsigned_const:
9252 case dw_val_class_file:
9253 implicit_consts.safe_push (true);
9254 break;
9255 default:
9256 implicit_consts.safe_push (false);
9257 break;
9258 }
9259 }
9260 }
9261 else if (dwarf_version >= 5)
9262 {
9263 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9264 if (!implicit_consts[ix])
9265 continue;
9266 else
9267 {
9268 dw_attr_node *other_a
9269 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9270 if (!dw_val_equal_p (&a->dw_attr_val,
9271 &other_a->dw_attr_val))
9272 implicit_consts[ix] = false;
9273 }
9274 }
9275 die->die_abbrev = abbrev_id;
9276 }
9277 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9278 if (dwarf_version >= 5 && first_id != ~0U)
9279 optimize_implicit_const (first_id, i, implicit_consts);
9280 }
9281
9282 abbrev_opt_start = 0;
9283 abbrev_opt_base_type_end = 0;
9284 abbrev_usage_count.release ();
9285 sorted_abbrev_dies.release ();
9286 }
9287 \f
9288 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9289
9290 static int
9291 constant_size (unsigned HOST_WIDE_INT value)
9292 {
9293 int log;
9294
9295 if (value == 0)
9296 log = 0;
9297 else
9298 log = floor_log2 (value);
9299
9300 log = log / 8;
9301 log = 1 << (floor_log2 (log) + 1);
9302
9303 return log;
9304 }
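/* A few concrete results of the function above (illustrative): it
   returns 1 for values up to 0xff (and for zero), 2 for values up to
   0xffff, 4 for values up to 0xffffffff, and 8 for anything larger,
   matching the DW_FORM_data1/2/4/8 choices made in value_format.  */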
9305
9306 /* Return the size of a DIE as it is represented in the
9307 .debug_info section. */
9308
9309 static unsigned long
9310 size_of_die (dw_die_ref die)
9311 {
9312 unsigned long size = 0;
9313 dw_attr_node *a;
9314 unsigned ix;
9315 enum dwarf_form form;
9316
9317 size += size_of_uleb128 (die->die_abbrev);
9318 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9319 {
9320 switch (AT_class (a))
9321 {
9322 case dw_val_class_addr:
9323 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9324 {
9325 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9326 size += size_of_uleb128 (AT_index (a));
9327 }
9328 else
9329 size += DWARF2_ADDR_SIZE;
9330 break;
9331 case dw_val_class_offset:
9332 size += DWARF_OFFSET_SIZE;
9333 break;
9334 case dw_val_class_loc:
9335 {
9336 unsigned long lsize = size_of_locs (AT_loc (a));
9337
9338 /* Block length. */
9339 if (dwarf_version >= 4)
9340 size += size_of_uleb128 (lsize);
9341 else
9342 size += constant_size (lsize);
9343 size += lsize;
9344 }
9345 break;
9346 case dw_val_class_loc_list:
9347 case dw_val_class_view_list:
9348 if (dwarf_split_debug_info && dwarf_version >= 5)
9349 {
9350 gcc_assert (AT_loc_list (a)->num_assigned);
9351 size += size_of_uleb128 (AT_loc_list (a)->hash);
9352 }
9353 else
9354 size += DWARF_OFFSET_SIZE;
9355 break;
9356 case dw_val_class_range_list:
9357 if (value_format (a) == DW_FORM_rnglistx)
9358 {
9359 gcc_assert (rnglist_idx);
9360 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9361 size += size_of_uleb128 (r->idx);
9362 }
9363 else
9364 size += DWARF_OFFSET_SIZE;
9365 break;
9366 case dw_val_class_const:
9367 size += size_of_sleb128 (AT_int (a));
9368 break;
9369 case dw_val_class_unsigned_const:
9370 {
9371 int csize = constant_size (AT_unsigned (a));
9372 if (dwarf_version == 3
9373 && a->dw_attr == DW_AT_data_member_location
9374 && csize >= 4)
9375 size += size_of_uleb128 (AT_unsigned (a));
9376 else
9377 size += csize;
9378 }
9379 break;
9380 case dw_val_class_symview:
9381 if (symview_upper_bound <= 0xff)
9382 size += 1;
9383 else if (symview_upper_bound <= 0xffff)
9384 size += 2;
9385 else if (symview_upper_bound <= 0xffffffff)
9386 size += 4;
9387 else
9388 size += 8;
9389 break;
9390 case dw_val_class_const_implicit:
9391 case dw_val_class_unsigned_const_implicit:
9392 case dw_val_class_file_implicit:
9393 /* These occupy no size in the DIE, just an extra sleb128 in
9394 .debug_abbrev. */
9395 break;
9396 case dw_val_class_const_double:
9397 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9398 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9399 size++; /* block */
9400 break;
9401 case dw_val_class_wide_int:
9402 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9403 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9404 if (get_full_len (*a->dw_attr_val.v.val_wide)
9405 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9406 size++; /* block */
9407 break;
9408 case dw_val_class_vec:
9409 size += constant_size (a->dw_attr_val.v.val_vec.length
9410 * a->dw_attr_val.v.val_vec.elt_size)
9411 + a->dw_attr_val.v.val_vec.length
9412 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9413 break;
9414 case dw_val_class_flag:
9415 if (dwarf_version >= 4)
9416 /* Currently all add_AT_flag calls pass in 1 as last argument,
9417 so DW_FORM_flag_present can be used. If that ever changes,
9418 we'll need to use DW_FORM_flag and have some optimization
9419 in build_abbrev_table that will change those to
9420 DW_FORM_flag_present if it is set to 1 in all DIEs using
9421 the same abbrev entry. */
9422 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9423 else
9424 size += 1;
9425 break;
9426 case dw_val_class_die_ref:
9427 if (AT_ref_external (a))
9428 {
9429 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9430 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9431 is sized by target address length, whereas in DWARF3
9432 it's always sized as an offset. */
9433 if (use_debug_types)
9434 size += DWARF_TYPE_SIGNATURE_SIZE;
9435 else if (dwarf_version == 2)
9436 size += DWARF2_ADDR_SIZE;
9437 else
9438 size += DWARF_OFFSET_SIZE;
9439 }
9440 else
9441 size += DWARF_OFFSET_SIZE;
9442 break;
9443 case dw_val_class_fde_ref:
9444 size += DWARF_OFFSET_SIZE;
9445 break;
9446 case dw_val_class_lbl_id:
9447 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9448 {
9449 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9450 size += size_of_uleb128 (AT_index (a));
9451 }
9452 else
9453 size += DWARF2_ADDR_SIZE;
9454 break;
9455 case dw_val_class_lineptr:
9456 case dw_val_class_macptr:
9457 case dw_val_class_loclistsptr:
9458 size += DWARF_OFFSET_SIZE;
9459 break;
9460 case dw_val_class_str:
9461 form = AT_string_form (a);
9462 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9463 size += DWARF_OFFSET_SIZE;
9464 else if (form == dwarf_FORM (DW_FORM_strx))
9465 size += size_of_uleb128 (AT_index (a));
9466 else
9467 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9468 break;
9469 case dw_val_class_file:
9470 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9471 break;
9472 case dw_val_class_data8:
9473 size += 8;
9474 break;
9475 case dw_val_class_vms_delta:
9476 size += DWARF_OFFSET_SIZE;
9477 break;
9478 case dw_val_class_high_pc:
9479 size += DWARF2_ADDR_SIZE;
9480 break;
9481 case dw_val_class_discr_value:
9482 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9483 break;
9484 case dw_val_class_discr_list:
9485 {
9486 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9487
9488 /* This is a block, so we have the block length and then its
9489 data. */
9490 size += constant_size (block_size) + block_size;
9491 }
9492 break;
9493 default:
9494 gcc_unreachable ();
9495 }
9496 }
9497
9498 return size;
9499 }
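/* LEB128 sizes used above (illustrative): a uleb128 stores seven value
   bits per byte, so 0-127 encode in one byte, 128-16383 in two, and so
   on; e.g. 624485 encodes as 0xe5 0x8e 0x26, three bytes.
   size_of_uleb128 and size_of_sleb128 only compute that length, they
   do not emit anything.  */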
9500
9501 /* Size the debugging information associated with a given DIE. Visits the
9502 DIE's children recursively. Updates the global variable next_die_offset, on
9503 each time through. Uses the current value of next_die_offset to update the
9504 die_offset field in each DIE. */
9505
9506 static void
9507 calc_die_sizes (dw_die_ref die)
9508 {
9509 dw_die_ref c;
9510
9511 gcc_assert (die->die_offset == 0
9512 || (unsigned long int) die->die_offset == next_die_offset);
9513 die->die_offset = next_die_offset;
9514 next_die_offset += size_of_die (die);
9515
9516 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9517
9518 if (die->die_child != NULL)
9519 /* Count the null byte used to terminate sibling lists. */
9520 next_die_offset += 1;
9521 }
9522
9523 /* Size just the base type children at the start of the CU.
9524 This is needed because build_abbrev_table needs to size locs
9525 and sizing of type based stack ops needs to know die_offset
9526 values for the base types. */
9527
9528 static void
9529 calc_base_type_die_sizes (void)
9530 {
9531 unsigned long die_offset = (dwarf_split_debug_info
9532 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9533 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9534 unsigned int i;
9535 dw_die_ref base_type;
9536 #if ENABLE_ASSERT_CHECKING
9537 dw_die_ref prev = comp_unit_die ()->die_child;
9538 #endif
9539
9540 die_offset += size_of_die (comp_unit_die ());
9541 for (i = 0; base_types.iterate (i, &base_type); i++)
9542 {
9543 #if ENABLE_ASSERT_CHECKING
9544 gcc_assert (base_type->die_offset == 0
9545 && prev->die_sib == base_type
9546 && base_type->die_child == NULL
9547 && base_type->die_abbrev);
9548 prev = base_type;
9549 #endif
9550 if (abbrev_opt_start
9551 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9552 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9553 base_type->die_offset = die_offset;
9554 die_offset += size_of_die (base_type);
9555 }
9556 }
9557
9558 /* Set the marks for a die and its children. We do this so
9559 that we know whether or not a reference needs to use FORM_ref_addr; only
9560 DIEs in the same CU will be marked. We used to clear out the offset
9561 and use that as the flag, but ran into ordering problems. */
9562
9563 static void
9564 mark_dies (dw_die_ref die)
9565 {
9566 dw_die_ref c;
9567
9568 gcc_assert (!die->die_mark);
9569
9570 die->die_mark = 1;
9571 FOR_EACH_CHILD (die, c, mark_dies (c));
9572 }
9573
9574 /* Clear the marks for a die and its children. */
9575
9576 static void
9577 unmark_dies (dw_die_ref die)
9578 {
9579 dw_die_ref c;
9580
9581 if (! use_debug_types)
9582 gcc_assert (die->die_mark);
9583
9584 die->die_mark = 0;
9585 FOR_EACH_CHILD (die, c, unmark_dies (c));
9586 }
9587
9588 /* Clear the marks for a die, its children and referred dies. */
9589
9590 static void
9591 unmark_all_dies (dw_die_ref die)
9592 {
9593 dw_die_ref c;
9594 dw_attr_node *a;
9595 unsigned ix;
9596
9597 if (!die->die_mark)
9598 return;
9599 die->die_mark = 0;
9600
9601 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9602
9603 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9604 if (AT_class (a) == dw_val_class_die_ref)
9605 unmark_all_dies (AT_ref (a));
9606 }
9607
9608 /* Calculate if the entry should appear in the final output file. It may be
9609 from a pruned type. */
9610
9611 static bool
9612 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9613 {
9614 /* By limiting gnu pubnames to definitions only, gold can generate a
9615 gdb index without entries for declarations, which don't include
9616 enough information to be useful. */
9617 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9618 return false;
9619
9620 if (table == pubname_table)
9621 {
9622 /* Enumerator names are part of the pubname table, but the
9623 parent DW_TAG_enumeration_type die may have been pruned.
9624 Don't output them if that is the case. */
9625 if (p->die->die_tag == DW_TAG_enumerator &&
9626 (p->die->die_parent == NULL
9627 || !p->die->die_parent->die_perennial_p))
9628 return false;
9629
9630 /* Everything else in the pubname table is included. */
9631 return true;
9632 }
9633
9634 /* The pubtypes table shouldn't include types that have been
9635 pruned. */
9636 return (p->die->die_offset != 0
9637 || !flag_eliminate_unused_debug_types);
9638 }
9639
9640 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9641 generated for the compilation unit. */
9642
9643 static unsigned long
9644 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9645 {
9646 unsigned long size;
9647 unsigned i;
9648 pubname_entry *p;
9649 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9650
9651 size = DWARF_PUBNAMES_HEADER_SIZE;
9652 FOR_EACH_VEC_ELT (*names, i, p)
9653 if (include_pubname_in_output (names, p))
9654 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9655
9656 size += DWARF_OFFSET_SIZE;
9657 return size;
9658 }
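/* Illustrative entry size for the sum above (assuming 32-bit DWARF): a
   pubnames entry for a symbol named "foo" is a 4-byte DIE offset, the
   string "foo" and its terminating NUL, i.e. 4 + 3 + 1 = 8 bytes, plus
   one extra flags byte when debug_generate_pub_sections == 2.  */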
9659
9660 /* Return the size of the information in the .debug_aranges section. */
9661
9662 static unsigned long
9663 size_of_aranges (void)
9664 {
9665 unsigned long size;
9666
9667 size = DWARF_ARANGES_HEADER_SIZE;
9668
9669 /* Count the address/length pair for this compilation unit. */
9670 if (text_section_used)
9671 size += 2 * DWARF2_ADDR_SIZE;
9672 if (cold_text_section_used)
9673 size += 2 * DWARF2_ADDR_SIZE;
9674 if (have_multiple_function_sections)
9675 {
9676 unsigned fde_idx;
9677 dw_fde_ref fde;
9678
9679 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9680 {
9681 if (DECL_IGNORED_P (fde->decl))
9682 continue;
9683 if (!fde->in_std_section)
9684 size += 2 * DWARF2_ADDR_SIZE;
9685 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9686 size += 2 * DWARF2_ADDR_SIZE;
9687 }
9688 }
9689
9690 /* Count the two zero words used to terminate the address range table. */
9691 size += 2 * DWARF2_ADDR_SIZE;
9692 return size;
9693 }
9694 \f
9695 /* Select the encoding of an attribute value. */
9696
9697 static enum dwarf_form
9698 value_format (dw_attr_node *a)
9699 {
9700 switch (AT_class (a))
9701 {
9702 case dw_val_class_addr:
9703 /* Only very few attributes allow DW_FORM_addr. */
9704 switch (a->dw_attr)
9705 {
9706 case DW_AT_low_pc:
9707 case DW_AT_high_pc:
9708 case DW_AT_entry_pc:
9709 case DW_AT_trampoline:
9710 return (AT_index (a) == NOT_INDEXED
9711 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9712 default:
9713 break;
9714 }
9715 switch (DWARF2_ADDR_SIZE)
9716 {
9717 case 1:
9718 return DW_FORM_data1;
9719 case 2:
9720 return DW_FORM_data2;
9721 case 4:
9722 return DW_FORM_data4;
9723 case 8:
9724 return DW_FORM_data8;
9725 default:
9726 gcc_unreachable ();
9727 }
9728 case dw_val_class_loc_list:
9729 case dw_val_class_view_list:
9730 if (dwarf_split_debug_info
9731 && dwarf_version >= 5
9732 && AT_loc_list (a)->num_assigned)
9733 return DW_FORM_loclistx;
9734 /* FALLTHRU */
9735 case dw_val_class_range_list:
9736 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9737 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9738 care about sizes of .debug* sections in shared libraries and
9739 executables and don't take into account relocations that affect just
9740 relocatable objects - for DW_FORM_rnglistx we'd have to emit offset
9741 table in the .debug_rnglists section. */
9742 if (dwarf_split_debug_info
9743 && dwarf_version >= 5
9744 && AT_class (a) == dw_val_class_range_list
9745 && rnglist_idx
9746 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9747 return DW_FORM_rnglistx;
9748 if (dwarf_version >= 4)
9749 return DW_FORM_sec_offset;
9750 /* FALLTHRU */
9751 case dw_val_class_vms_delta:
9752 case dw_val_class_offset:
9753 switch (DWARF_OFFSET_SIZE)
9754 {
9755 case 4:
9756 return DW_FORM_data4;
9757 case 8:
9758 return DW_FORM_data8;
9759 default:
9760 gcc_unreachable ();
9761 }
9762 case dw_val_class_loc:
9763 if (dwarf_version >= 4)
9764 return DW_FORM_exprloc;
9765 switch (constant_size (size_of_locs (AT_loc (a))))
9766 {
9767 case 1:
9768 return DW_FORM_block1;
9769 case 2:
9770 return DW_FORM_block2;
9771 case 4:
9772 return DW_FORM_block4;
9773 default:
9774 gcc_unreachable ();
9775 }
9776 case dw_val_class_const:
9777 return DW_FORM_sdata;
9778 case dw_val_class_unsigned_const:
9779 switch (constant_size (AT_unsigned (a)))
9780 {
9781 case 1:
9782 return DW_FORM_data1;
9783 case 2:
9784 return DW_FORM_data2;
9785 case 4:
9786 /* In DWARF3 DW_AT_data_member_location with
9787 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9788 constant, so we need to use DW_FORM_udata if we need
9789 a large constant. */
9790 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9791 return DW_FORM_udata;
9792 return DW_FORM_data4;
9793 case 8:
9794 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9795 return DW_FORM_udata;
9796 return DW_FORM_data8;
9797 default:
9798 gcc_unreachable ();
9799 }
9800 case dw_val_class_const_implicit:
9801 case dw_val_class_unsigned_const_implicit:
9802 case dw_val_class_file_implicit:
9803 return DW_FORM_implicit_const;
9804 case dw_val_class_const_double:
9805 switch (HOST_BITS_PER_WIDE_INT)
9806 {
9807 case 8:
9808 return DW_FORM_data2;
9809 case 16:
9810 return DW_FORM_data4;
9811 case 32:
9812 return DW_FORM_data8;
9813 case 64:
9814 if (dwarf_version >= 5)
9815 return DW_FORM_data16;
9816 /* FALLTHRU */
9817 default:
9818 return DW_FORM_block1;
9819 }
9820 case dw_val_class_wide_int:
9821 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9822 {
9823 case 8:
9824 return DW_FORM_data1;
9825 case 16:
9826 return DW_FORM_data2;
9827 case 32:
9828 return DW_FORM_data4;
9829 case 64:
9830 return DW_FORM_data8;
9831 case 128:
9832 if (dwarf_version >= 5)
9833 return DW_FORM_data16;
9834 /* FALLTHRU */
9835 default:
9836 return DW_FORM_block1;
9837 }
9838 case dw_val_class_symview:
9839 /* ??? We might use uleb128, but then we'd have to compute
9840 .debug_info offsets in the assembler. */
9841 if (symview_upper_bound <= 0xff)
9842 return DW_FORM_data1;
9843 else if (symview_upper_bound <= 0xffff)
9844 return DW_FORM_data2;
9845 else if (symview_upper_bound <= 0xffffffff)
9846 return DW_FORM_data4;
9847 else
9848 return DW_FORM_data8;
9849 case dw_val_class_vec:
9850 switch (constant_size (a->dw_attr_val.v.val_vec.length
9851 * a->dw_attr_val.v.val_vec.elt_size))
9852 {
9853 case 1:
9854 return DW_FORM_block1;
9855 case 2:
9856 return DW_FORM_block2;
9857 case 4:
9858 return DW_FORM_block4;
9859 default:
9860 gcc_unreachable ();
9861 }
9862 case dw_val_class_flag:
9863 if (dwarf_version >= 4)
9864 {
9865 /* Currently all add_AT_flag calls pass in 1 as last argument,
9866 so DW_FORM_flag_present can be used. If that ever changes,
9867 we'll need to use DW_FORM_flag and have some optimization
9868 in build_abbrev_table that will change those to
9869 DW_FORM_flag_present if it is set to 1 in all DIEs using
9870 the same abbrev entry. */
9871 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9872 return DW_FORM_flag_present;
9873 }
9874 return DW_FORM_flag;
9875 case dw_val_class_die_ref:
9876 if (AT_ref_external (a))
9877 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9878 else
9879 return DW_FORM_ref4;
9880 case dw_val_class_fde_ref:
9881 return DW_FORM_data4;
9882 case dw_val_class_lbl_id:
9883 return (AT_index (a) == NOT_INDEXED
9884 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9885 case dw_val_class_lineptr:
9886 case dw_val_class_macptr:
9887 case dw_val_class_loclistsptr:
9888 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data4;
9889 case dw_val_class_str:
9890 return AT_string_form (a);
9891 case dw_val_class_file:
9892 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9893 {
9894 case 1:
9895 return DW_FORM_data1;
9896 case 2:
9897 return DW_FORM_data2;
9898 case 4:
9899 return DW_FORM_data4;
9900 default:
9901 gcc_unreachable ();
9902 }
9903
9904 case dw_val_class_data8:
9905 return DW_FORM_data8;
9906
9907 case dw_val_class_high_pc:
9908 switch (DWARF2_ADDR_SIZE)
9909 {
9910 case 1:
9911 return DW_FORM_data1;
9912 case 2:
9913 return DW_FORM_data2;
9914 case 4:
9915 return DW_FORM_data4;
9916 case 8:
9917 return DW_FORM_data8;
9918 default:
9919 gcc_unreachable ();
9920 }
9921
9922 case dw_val_class_discr_value:
9923 return (a->dw_attr_val.v.val_discr_value.pos
9924 ? DW_FORM_udata
9925 : DW_FORM_sdata);
9926 case dw_val_class_discr_list:
9927 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9928 {
9929 case 1:
9930 return DW_FORM_block1;
9931 case 2:
9932 return DW_FORM_block2;
9933 case 4:
9934 return DW_FORM_block4;
9935 default:
9936 gcc_unreachable ();
9937 }
9938
9939 default:
9940 gcc_unreachable ();
9941 }
9942 }
9943
9944 /* Output the encoding of an attribute value. */
9945
9946 static void
9947 output_value_format (dw_attr_node *a)
9948 {
9949 enum dwarf_form form = value_format (a);
9950
9951 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9952 }
9953
9954 /* Given a die and id, produce the appropriate abbreviations. */
9955
9956 static void
9957 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9958 {
9959 unsigned ix;
9960 dw_attr_node *a_attr;
9961
9962 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9963 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9964 dwarf_tag_name (abbrev->die_tag));
9965
9966 if (abbrev->die_child != NULL)
9967 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9968 else
9969 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9970
9971 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9972 {
9973 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9974 dwarf_attr_name (a_attr->dw_attr));
9975 output_value_format (a_attr);
9976 if (value_format (a_attr) == DW_FORM_implicit_const)
9977 {
9978 if (AT_class (a_attr) == dw_val_class_file_implicit)
9979 {
9980 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9981 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9982 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9983 }
9984 else
9985 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9986 }
9987 }
9988
9989 dw2_asm_output_data (1, 0, NULL);
9990 dw2_asm_output_data (1, 0, NULL);
9991 }
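/* Illustrative .debug_abbrev entry as emitted by the function above
   (hypothetical abbrev code 3 for a childless DW_TAG_variable with a
   name and a type):

	.uleb128 0x3	(abbrev code)
	.uleb128 0x34	(TAG: DW_TAG_variable)
	.byte	0	(DW_children_no)
	.uleb128 0x3	(DW_AT_name)
	.uleb128 0xe	(DW_FORM_strp)
	.uleb128 0x49	(DW_AT_type)
	.uleb128 0x13	(DW_FORM_ref4)
	.byte	0
	.byte	0

   The two trailing zero bytes terminate the attribute list; a final
   zero byte after the last entry terminates the whole table (see
   output_abbrev_section below).  */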
9992
9993
9994 /* Output the .debug_abbrev section which defines the DIE abbreviation
9995 table. */
9996
9997 static void
9998 output_abbrev_section (void)
9999 {
10000 unsigned int abbrev_id;
10001 dw_die_ref abbrev;
10002
10003 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10004 if (abbrev_id != 0)
10005 output_die_abbrevs (abbrev_id, abbrev);
10006
10007 /* Terminate the table. */
10008 dw2_asm_output_data (1, 0, NULL);
10009 }
10010
10011 /* Return a new location list, given the begin and end range, and the
10012 expression. */
10013
10014 static inline dw_loc_list_ref
10015 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10016 const char *end, var_loc_view vend,
10017 const char *section)
10018 {
10019 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10020
10021 retlist->begin = begin;
10022 retlist->begin_entry = NULL;
10023 retlist->end = end;
10024 retlist->expr = expr;
10025 retlist->section = section;
10026 retlist->vbegin = vbegin;
10027 retlist->vend = vend;
10028
10029 return retlist;
10030 }
10031
10032 /* Return true iff there's any nonzero view number in the loc list. */
10033
10034 static bool
10035 loc_list_has_views (dw_loc_list_ref list)
10036 {
10037 if (!debug_variable_location_views)
10038 return false;
10039
10040 for (dw_loc_list_ref loc = list;
10041 loc != NULL; loc = loc->dw_loc_next)
10042 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10043 return true;
10044
10045 return false;
10046 }
10047
10048 /* Generate a new internal symbol for this location list node, if it
10049 hasn't got one yet. */
10050
10051 static inline void
10052 gen_llsym (dw_loc_list_ref list)
10053 {
10054 gcc_assert (!list->ll_symbol);
10055 list->ll_symbol = gen_internal_sym ("LLST");
10056
10057 if (!loc_list_has_views (list))
10058 return;
10059
10060 if (dwarf2out_locviews_in_attribute ())
10061 {
10062 /* Use the same label_num for the view list. */
10063 label_num--;
10064 list->vl_symbol = gen_internal_sym ("LVUS");
10065 }
10066 else
10067 list->vl_symbol = list->ll_symbol;
10068 }
10069
10070 /* Generate a symbol for the list, but only if we really want to emit
10071 it as a list. */
10072
10073 static inline void
10074 maybe_gen_llsym (dw_loc_list_ref list)
10075 {
10076 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10077 return;
10078
10079 gen_llsym (list);
10080 }
10081
10082 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10083 NULL, don't consider size of the location expression. If we're not
10084 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10085 representation in *SIZEP. */
10086
10087 static bool
10088 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10089 {
10090 /* Don't output an entry that starts and ends at the same address. */
10091 if (strcmp (curr->begin, curr->end) == 0
10092 && curr->vbegin == curr->vend && !curr->force)
10093 return true;
10094
10095 if (!sizep)
10096 return false;
10097
10098 unsigned long size = size_of_locs (curr->expr);
10099
10100 /* If the expression is too large, drop it on the floor. We could
10101 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10102 in the expression, but >= 64KB expressions for a single value
10103 in a single range are unlikely to be very useful. */
10104 if (dwarf_version < 5 && size > 0xffff)
10105 return true;
10106
10107 *sizep = size;
10108
10109 return false;
10110 }
10111
10112 /* Output a view pair loclist entry for CURR, if it requires one. */
10113
10114 static void
10115 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10116 {
10117 if (!dwarf2out_locviews_in_loclist ())
10118 return;
10119
10120 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10121 return;
10122
10123 #ifdef DW_LLE_view_pair
10124 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10125
10126 if (dwarf2out_as_locview_support)
10127 {
10128 if (ZERO_VIEW_P (curr->vbegin))
10129 dw2_asm_output_data_uleb128 (0, "Location view begin");
10130 else
10131 {
10132 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10133 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10134 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10135 }
10136
10137 if (ZERO_VIEW_P (curr->vend))
10138 dw2_asm_output_data_uleb128 (0, "Location view end");
10139 else
10140 {
10141 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10142 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10143 dw2_asm_output_symname_uleb128 (label, "Location view end");
10144 }
10145 }
10146 else
10147 {
10148 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10149 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10150 }
10151 #endif /* DW_LLE_view_pair */
10152
10153 return;
10154 }
10155
10156 /* Output the location list given to us. */
10157
10158 static void
10159 output_loc_list (dw_loc_list_ref list_head)
10160 {
10161 int vcount = 0, lcount = 0;
10162
10163 if (list_head->emitted)
10164 return;
10165 list_head->emitted = true;
10166
10167 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10168 {
10169 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10170
10171 for (dw_loc_list_ref curr = list_head; curr != NULL;
10172 curr = curr->dw_loc_next)
10173 {
10174 unsigned long size;
10175
10176 if (skip_loc_list_entry (curr, &size))
10177 continue;
10178
10179 vcount++;
10180
10181 /* ?? dwarf_split_debug_info? */
10182 if (dwarf2out_as_locview_support)
10183 {
10184 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10185
10186 if (!ZERO_VIEW_P (curr->vbegin))
10187 {
10188 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10189 dw2_asm_output_symname_uleb128 (label,
10190 "View list begin (%s)",
10191 list_head->vl_symbol);
10192 }
10193 else
10194 dw2_asm_output_data_uleb128 (0,
10195 "View list begin (%s)",
10196 list_head->vl_symbol);
10197
10198 if (!ZERO_VIEW_P (curr->vend))
10199 {
10200 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10201 dw2_asm_output_symname_uleb128 (label,
10202 "View list end (%s)",
10203 list_head->vl_symbol);
10204 }
10205 else
10206 dw2_asm_output_data_uleb128 (0,
10207 "View list end (%s)",
10208 list_head->vl_symbol);
10209 }
10210 else
10211 {
10212 dw2_asm_output_data_uleb128 (curr->vbegin,
10213 "View list begin (%s)",
10214 list_head->vl_symbol);
10215 dw2_asm_output_data_uleb128 (curr->vend,
10216 "View list end (%s)",
10217 list_head->vl_symbol);
10218 }
10219 }
10220 }
10221
10222 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10223
10224 const char *last_section = NULL;
10225 const char *base_label = NULL;
10226
10227 /* Walk the location list, and output each range + expression. */
10228 for (dw_loc_list_ref curr = list_head; curr != NULL;
10229 curr = curr->dw_loc_next)
10230 {
10231 unsigned long size;
10232
10233 /* Skip this entry? If we skip it here, we must skip it in the
10234 view list above as well. */
10235 if (skip_loc_list_entry (curr, &size))
10236 continue;
10237
10238 lcount++;
10239
10240 if (dwarf_version >= 5)
10241 {
10242 if (dwarf_split_debug_info)
10243 {
10244 dwarf2out_maybe_output_loclist_view_pair (curr);
10245 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10246 uleb128 index into .debug_addr and uleb128 length. */
10247 dw2_asm_output_data (1, DW_LLE_startx_length,
10248 "DW_LLE_startx_length (%s)",
10249 list_head->ll_symbol);
10250 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10251 "Location list range start index "
10252 "(%s)", curr->begin);
10253 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10254 For that case we probably need to emit DW_LLE_startx_endx,
10255 but we'd need 2 .debug_addr entries rather than just one. */
10256 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10257 "Location list length (%s)",
10258 list_head->ll_symbol);
10259 }
10260 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10261 {
10262 dwarf2out_maybe_output_loclist_view_pair (curr);
10263 /* If all code is in .text section, the base address is
10264 already provided by the CU attributes. Use
10265 DW_LLE_offset_pair where both addresses are uleb128 encoded
10266 offsets against that base. */
10267 dw2_asm_output_data (1, DW_LLE_offset_pair,
10268 "DW_LLE_offset_pair (%s)",
10269 list_head->ll_symbol);
10270 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10271 "Location list begin address (%s)",
10272 list_head->ll_symbol);
10273 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10274 "Location list end address (%s)",
10275 list_head->ll_symbol);
10276 }
10277 else if (HAVE_AS_LEB128)
10278 {
10279 /* Otherwise, find out how many consecutive entries could share
10280 the same base entry. If just one, emit DW_LLE_start_length,
10281 otherwise emit DW_LLE_base_address for the base address
10282 followed by a series of DW_LLE_offset_pair. */
10283 if (last_section == NULL || curr->section != last_section)
10284 {
10285 dw_loc_list_ref curr2;
10286 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10287 curr2 = curr2->dw_loc_next)
10288 {
10289 if (strcmp (curr2->begin, curr2->end) == 0
10290 && !curr2->force)
10291 continue;
10292 break;
10293 }
10294 if (curr2 == NULL || curr->section != curr2->section)
10295 last_section = NULL;
10296 else
10297 {
10298 last_section = curr->section;
10299 base_label = curr->begin;
10300 dw2_asm_output_data (1, DW_LLE_base_address,
10301 "DW_LLE_base_address (%s)",
10302 list_head->ll_symbol);
10303 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10304 "Base address (%s)",
10305 list_head->ll_symbol);
10306 }
10307 }
10308 /* Only one entry with the same base address. Use
10309 DW_LLE_start_length with absolute address and uleb128
10310 length. */
10311 if (last_section == NULL)
10312 {
10313 dwarf2out_maybe_output_loclist_view_pair (curr);
10314 dw2_asm_output_data (1, DW_LLE_start_length,
10315 "DW_LLE_start_length (%s)",
10316 list_head->ll_symbol);
10317 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10318 "Location list begin address (%s)",
10319 list_head->ll_symbol);
10320 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10321 "Location list length "
10322 "(%s)", list_head->ll_symbol);
10323 }
10324 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10325 DW_LLE_base_address. */
10326 else
10327 {
10328 dwarf2out_maybe_output_loclist_view_pair (curr);
10329 dw2_asm_output_data (1, DW_LLE_offset_pair,
10330 "DW_LLE_offset_pair (%s)",
10331 list_head->ll_symbol);
10332 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10333 "Location list begin address "
10334 "(%s)", list_head->ll_symbol);
10335 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10336 "Location list end address "
10337 "(%s)", list_head->ll_symbol);
10338 }
10339 }
10340 /* The assembler does not support the .uleb128 directive. Emit
10341 DW_LLE_start_end with a pair of absolute addresses. */
10342 else
10343 {
10344 dwarf2out_maybe_output_loclist_view_pair (curr);
10345 dw2_asm_output_data (1, DW_LLE_start_end,
10346 "DW_LLE_start_end (%s)",
10347 list_head->ll_symbol);
10348 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10349 "Location list begin address (%s)",
10350 list_head->ll_symbol);
10351 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10352 "Location list end address (%s)",
10353 list_head->ll_symbol);
10354 }
10355 }
10356 else if (dwarf_split_debug_info)
10357 {
10358 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10359 and 4 byte length. */
10360 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10361 "Location list start/length entry (%s)",
10362 list_head->ll_symbol);
10363 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10364 "Location list range start index (%s)",
10365 curr->begin);
10366 /* The length field is 4 bytes. If we ever need to support
10367 an 8-byte length, we can add a new DW_LLE code or fall back
10368 to DW_LLE_GNU_start_end_entry. */
10369 dw2_asm_output_delta (4, curr->end, curr->begin,
10370 "Location list range length (%s)",
10371 list_head->ll_symbol);
10372 }
10373 else if (!have_multiple_function_sections)
10374 {
10375 /* Pair of relative addresses against start of text section. */
10376 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10377 "Location list begin address (%s)",
10378 list_head->ll_symbol);
10379 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10380 "Location list end address (%s)",
10381 list_head->ll_symbol);
10382 }
10383 else
10384 {
10385 /* Pair of absolute addresses. */
10386 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10387 "Location list begin address (%s)",
10388 list_head->ll_symbol);
10389 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10390 "Location list end address (%s)",
10391 list_head->ll_symbol);
10392 }
10393
10394 /* Output the block length for this list of location operations. */
10395 if (dwarf_version >= 5)
10396 dw2_asm_output_data_uleb128 (size, "Location expression size");
10397 else
10398 {
10399 gcc_assert (size <= 0xffff);
10400 dw2_asm_output_data (2, size, "Location expression size");
10401 }
10402
10403 output_loc_sequence (curr->expr, -1);
10404 }
10405
10406 /* And finally list termination. */
10407 if (dwarf_version >= 5)
10408 dw2_asm_output_data (1, DW_LLE_end_of_list,
10409 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10410 else if (dwarf_split_debug_info)
10411 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10412 "Location list terminator (%s)",
10413 list_head->ll_symbol);
10414 else
10415 {
10416 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10417 "Location list terminator begin (%s)",
10418 list_head->ll_symbol);
10419 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10420 "Location list terminator end (%s)",
10421 list_head->ll_symbol);
10422 }
10423
10424 gcc_assert (!list_head->vl_symbol
10425 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10426 }
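/* Illustrative DWARF 5 .debug_loclists output for one range in the
   single-text-section case handled above (the labels are
   hypothetical):

	.byte	0x4			(DW_LLE_offset_pair)
	.uleb128 .LVL3-.Ltext0		(Location list begin address)
	.uleb128 .LVL4-.Ltext0		(Location list end address)
	.uleb128 0x1			(Location expression size)
	.byte	0x50			(DW_OP_reg0)
	.byte	0			(DW_LLE_end_of_list)  */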
10427
10428 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10429 section. Emit a relocated reference if val_entry is NULL, otherwise,
10430 emit an indirect reference. */
10431
10432 static void
10433 output_range_list_offset (dw_attr_node *a)
10434 {
10435 const char *name = dwarf_attr_name (a->dw_attr);
10436
10437 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10438 {
10439 if (dwarf_version >= 5)
10440 {
10441 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10442 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10443 debug_ranges_section, "%s", name);
10444 }
10445 else
10446 {
10447 char *p = strchr (ranges_section_label, '\0');
10448 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10449 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10450 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10451 debug_ranges_section, "%s", name);
10452 *p = '\0';
10453 }
10454 }
10455 else if (dwarf_version >= 5)
10456 {
10457 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10458 gcc_assert (rnglist_idx);
10459 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10460 }
10461 else
10462 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10463 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10464 "%s (offset from %s)", name, ranges_section_label);
10465 }
10466
10467 /* Output the offset into the debug_loc section. */
10468
10469 static void
10470 output_loc_list_offset (dw_attr_node *a)
10471 {
10472 char *sym = AT_loc_list (a)->ll_symbol;
10473
10474 gcc_assert (sym);
10475 if (!dwarf_split_debug_info)
10476 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10477 "%s", dwarf_attr_name (a->dw_attr));
10478 else if (dwarf_version >= 5)
10479 {
10480 gcc_assert (AT_loc_list (a)->num_assigned);
10481 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10482 dwarf_attr_name (a->dw_attr),
10483 sym);
10484 }
10485 else
10486 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10487 "%s", dwarf_attr_name (a->dw_attr));
10488 }
10489
10490 /* Output the view list offset into the debug_loc section. */
10491
10492 static void
10493 output_view_list_offset (dw_attr_node *a)
10494 {
10495 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10496
10497 gcc_assert (sym);
10498 if (dwarf_split_debug_info)
10499 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10500 "%s", dwarf_attr_name (a->dw_attr));
10501 else
10502 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10503 "%s", dwarf_attr_name (a->dw_attr));
10504 }
10505
10506 /* Output an attribute's index or value appropriately. */
10507
10508 static void
10509 output_attr_index_or_value (dw_attr_node *a)
10510 {
10511 const char *name = dwarf_attr_name (a->dw_attr);
10512
10513 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10514 {
10515 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10516 return;
10517 }
10518 switch (AT_class (a))
10519 {
10520 case dw_val_class_addr:
10521 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10522 break;
10523 case dw_val_class_high_pc:
10524 case dw_val_class_lbl_id:
10525 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10526 break;
10527 default:
10528 gcc_unreachable ();
10529 }
10530 }
10531
10532 /* Output a type signature. */
10533
10534 static inline void
10535 output_signature (const char *sig, const char *name)
10536 {
10537 int i;
10538
10539 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10540 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10541 }
10542
10543 /* Output a discriminant value. */
10544
10545 static inline void
10546 output_discr_value (dw_discr_value *discr_value, const char *name)
10547 {
10548 if (discr_value->pos)
10549 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10550 else
10551 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10552 }
10553
10554 /* Output the DIE and its attributes. Called recursively to generate
10555 the definitions of each child DIE. */
10556
10557 static void
10558 output_die (dw_die_ref die)
10559 {
10560 dw_attr_node *a;
10561 dw_die_ref c;
10562 unsigned long size;
10563 unsigned ix;
10564
10565 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10566 (unsigned long)die->die_offset,
10567 dwarf_tag_name (die->die_tag));
10568
10569 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10570 {
10571 const char *name = dwarf_attr_name (a->dw_attr);
10572
10573 switch (AT_class (a))
10574 {
10575 case dw_val_class_addr:
10576 output_attr_index_or_value (a);
10577 break;
10578
10579 case dw_val_class_offset:
10580 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10581 "%s", name);
10582 break;
10583
10584 case dw_val_class_range_list:
10585 output_range_list_offset (a);
10586 break;
10587
10588 case dw_val_class_loc:
10589 size = size_of_locs (AT_loc (a));
10590
10591 /* Output the block length for this list of location operations. */
10592 if (dwarf_version >= 4)
10593 dw2_asm_output_data_uleb128 (size, "%s", name);
10594 else
10595 dw2_asm_output_data (constant_size (size), size, "%s", name);
10596
10597 output_loc_sequence (AT_loc (a), -1);
10598 break;
10599
10600 case dw_val_class_const:
10601 /* ??? It would be slightly more efficient to use a scheme like is
10602 used for unsigned constants below, but gdb 4.x does not sign
10603 extend. Gdb 5.x does sign extend. */
10604 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10605 break;
10606
10607 case dw_val_class_unsigned_const:
10608 {
10609 int csize = constant_size (AT_unsigned (a));
10610 if (dwarf_version == 3
10611 && a->dw_attr == DW_AT_data_member_location
10612 && csize >= 4)
10613 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10614 else
10615 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10616 }
10617 break;
10618
10619 case dw_val_class_symview:
10620 {
10621 int vsize;
10622 if (symview_upper_bound <= 0xff)
10623 vsize = 1;
10624 else if (symview_upper_bound <= 0xffff)
10625 vsize = 2;
10626 else if (symview_upper_bound <= 0xffffffff)
10627 vsize = 4;
10628 else
10629 vsize = 8;
10630 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10631 "%s", name);
10632 }
10633 break;
10634
10635 case dw_val_class_const_implicit:
10636 if (flag_debug_asm)
10637 fprintf (asm_out_file, "\t\t\t%s %s ("
10638 HOST_WIDE_INT_PRINT_DEC ")\n",
10639 ASM_COMMENT_START, name, AT_int (a));
10640 break;
10641
10642 case dw_val_class_unsigned_const_implicit:
10643 if (flag_debug_asm)
10644 fprintf (asm_out_file, "\t\t\t%s %s ("
10645 HOST_WIDE_INT_PRINT_HEX ")\n",
10646 ASM_COMMENT_START, name, AT_unsigned (a));
10647 break;
10648
10649 case dw_val_class_const_double:
10650 {
10651 unsigned HOST_WIDE_INT first, second;
10652
10653 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10654 dw2_asm_output_data (1,
10655 HOST_BITS_PER_DOUBLE_INT
10656 / HOST_BITS_PER_CHAR,
10657 NULL);
10658
10659 if (WORDS_BIG_ENDIAN)
10660 {
10661 first = a->dw_attr_val.v.val_double.high;
10662 second = a->dw_attr_val.v.val_double.low;
10663 }
10664 else
10665 {
10666 first = a->dw_attr_val.v.val_double.low;
10667 second = a->dw_attr_val.v.val_double.high;
10668 }
10669
10670 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10671 first, "%s", name);
10672 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10673 second, NULL);
10674 }
10675 break;
10676
10677 case dw_val_class_wide_int:
10678 {
10679 int i;
10680 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10681 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10682 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10683 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10684 * l, NULL);
10685
10686 if (WORDS_BIG_ENDIAN)
10687 for (i = len - 1; i >= 0; --i)
10688 {
10689 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10690 "%s", name);
10691 name = "";
10692 }
10693 else
10694 for (i = 0; i < len; ++i)
10695 {
10696 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10697 "%s", name);
10698 name = "";
10699 }
10700 }
10701 break;
10702
10703 case dw_val_class_vec:
10704 {
10705 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10706 unsigned int len = a->dw_attr_val.v.val_vec.length;
10707 unsigned int i;
10708 unsigned char *p;
10709
10710 dw2_asm_output_data (constant_size (len * elt_size),
10711 len * elt_size, "%s", name);
10712 if (elt_size > sizeof (HOST_WIDE_INT))
10713 {
10714 elt_size /= 2;
10715 len *= 2;
10716 }
10717 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10718 i < len;
10719 i++, p += elt_size)
10720 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10721 "fp or vector constant word %u", i);
10722 break;
10723 }
10724
10725 case dw_val_class_flag:
10726 if (dwarf_version >= 4)
10727 {
10728 /* Currently all add_AT_flag calls pass in 1 as last argument,
10729 so DW_FORM_flag_present can be used. If that ever changes,
10730 we'll need to use DW_FORM_flag and have some optimization
10731 in build_abbrev_table that will change those to
10732 DW_FORM_flag_present if it is set to 1 in all DIEs using
10733 the same abbrev entry. */
10734 gcc_assert (AT_flag (a) == 1);
10735 if (flag_debug_asm)
10736 fprintf (asm_out_file, "\t\t\t%s %s\n",
10737 ASM_COMMENT_START, name);
10738 break;
10739 }
10740 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10741 break;
10742
10743 case dw_val_class_loc_list:
10744 output_loc_list_offset (a);
10745 break;
10746
10747 case dw_val_class_view_list:
10748 output_view_list_offset (a);
10749 break;
10750
10751 case dw_val_class_die_ref:
10752 if (AT_ref_external (a))
10753 {
10754 if (AT_ref (a)->comdat_type_p)
10755 {
10756 comdat_type_node *type_node
10757 = AT_ref (a)->die_id.die_type_node;
10758
10759 gcc_assert (type_node);
10760 output_signature (type_node->signature, name);
10761 }
10762 else
10763 {
10764 const char *sym = AT_ref (a)->die_id.die_symbol;
10765 int size;
10766
10767 gcc_assert (sym);
10768 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10769 length, whereas in DWARF3 it's always sized as an
10770 offset. */
10771 if (dwarf_version == 2)
10772 size = DWARF2_ADDR_SIZE;
10773 else
10774 size = DWARF_OFFSET_SIZE;
10775 /* ??? We cannot unconditionally output die_offset if
10776 non-zero - others might create references to those
10777 DIEs via symbols.
10778 And we do not clear its DIE offset after outputting it
10779 (and the label refers to the actual DIE, not to the
10780 DWARF CU unit header, which is where using label + offset
10781 would be the correct thing to do).
10782 ??? This is the reason for the with_offset flag. */
10783 if (AT_ref (a)->with_offset)
10784 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10785 debug_info_section, "%s", name);
10786 else
10787 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10788 name);
10789 }
10790 }
10791 else
10792 {
10793 gcc_assert (AT_ref (a)->die_offset);
10794 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10795 "%s", name);
10796 }
10797 break;
10798
10799 case dw_val_class_fde_ref:
10800 {
10801 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10802
10803 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10804 a->dw_attr_val.v.val_fde_index * 2);
10805 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10806 "%s", name);
10807 }
10808 break;
10809
10810 case dw_val_class_vms_delta:
10811 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10812 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10813 AT_vms_delta2 (a), AT_vms_delta1 (a),
10814 "%s", name);
10815 #else
10816 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10817 AT_vms_delta2 (a), AT_vms_delta1 (a),
10818 "%s", name);
10819 #endif
10820 break;
10821
10822 case dw_val_class_lbl_id:
10823 output_attr_index_or_value (a);
10824 break;
10825
10826 case dw_val_class_lineptr:
10827 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10828 debug_line_section, "%s", name);
10829 break;
10830
10831 case dw_val_class_macptr:
10832 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10833 debug_macinfo_section, "%s", name);
10834 break;
10835
10836 case dw_val_class_loclistsptr:
10837 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10838 debug_loc_section, "%s", name);
10839 break;
10840
10841 case dw_val_class_str:
10842 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10843 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10844 a->dw_attr_val.v.val_str->label,
10845 debug_str_section,
10846 "%s: \"%s\"", name, AT_string (a));
10847 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10848 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10849 a->dw_attr_val.v.val_str->label,
10850 debug_line_str_section,
10851 "%s: \"%s\"", name, AT_string (a));
10852 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10853 dw2_asm_output_data_uleb128 (AT_index (a),
10854 "%s: \"%s\"", name, AT_string (a));
10855 else
10856 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10857 break;
10858
10859 case dw_val_class_file:
10860 {
10861 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10862
10863 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10864 a->dw_attr_val.v.val_file->filename);
10865 break;
10866 }
10867
10868 case dw_val_class_file_implicit:
10869 if (flag_debug_asm)
10870 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10871 ASM_COMMENT_START, name,
10872 maybe_emit_file (a->dw_attr_val.v.val_file),
10873 a->dw_attr_val.v.val_file->filename);
10874 break;
10875
10876 case dw_val_class_data8:
10877 {
10878 int i;
10879
10880 for (i = 0; i < 8; i++)
10881 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10882 i == 0 ? "%s" : NULL, name);
10883 break;
10884 }
10885
10886 case dw_val_class_high_pc:
10887 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10888 get_AT_low_pc (die), "DW_AT_high_pc");
10889 break;
10890
10891 case dw_val_class_discr_value:
10892 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10893 break;
10894
10895 case dw_val_class_discr_list:
10896 {
10897 dw_discr_list_ref list = AT_discr_list (a);
10898 const int size = size_of_discr_list (list);
10899
10900 /* This is a block, so output its length first. */
10901 dw2_asm_output_data (constant_size (size), size,
10902 "%s: block size", name);
10903
10904 for (; list != NULL; list = list->dw_discr_next)
10905 {
10906 /* One byte for the discriminant value descriptor, and then as
10907 many LEB128 numbers as required. */
10908 if (list->dw_discr_range)
10909 dw2_asm_output_data (1, DW_DSC_range,
10910 "%s: DW_DSC_range", name);
10911 else
10912 dw2_asm_output_data (1, DW_DSC_label,
10913 "%s: DW_DSC_label", name);
10914
10915 output_discr_value (&list->dw_discr_lower_bound, name);
10916 if (list->dw_discr_range)
10917 output_discr_value (&list->dw_discr_upper_bound, name);
10918 }
10919 break;
10920 }
10921
10922 default:
10923 gcc_unreachable ();
10924 }
10925 }
10926
10927 FOR_EACH_CHILD (die, c, output_die (c));
10928
10929 /* Add null byte to terminate sibling list. */
10930 if (die->die_child != NULL)
10931 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10932 (unsigned long) die->die_offset);
10933 }
10934
10935 /* Output the dwarf version number. */
10936
10937 static void
10938 output_dwarf_version ()
10939 {
10940 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10941 views in loclist. That will change eventually. */
10942 if (dwarf_version == 6)
10943 {
10944 static bool once;
10945 if (!once)
10946 {
10947 warning (0,
10948 "-gdwarf-6 is output as version 5 with incompatibilities");
10949 once = true;
10950 }
10951 dw2_asm_output_data (2, 5, "DWARF version number");
10952 }
10953 else
10954 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10955 }
10956
10957 /* Output the compilation unit that appears at the beginning of the
10958 .debug_info section, and precedes the DIE descriptions. */
10959
10960 static void
10961 output_compilation_unit_header (enum dwarf_unit_type ut)
10962 {
10963 if (!XCOFF_DEBUGGING_INFO)
10964 {
10965 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10966 dw2_asm_output_data (4, 0xffffffff,
10967 "Initial length escape value indicating 64-bit DWARF extension");
10968 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10969 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10970 "Length of Compilation Unit Info");
10971 }
10972
10973 output_dwarf_version ();
10974 if (dwarf_version >= 5)
10975 {
10976 const char *name;
10977 switch (ut)
10978 {
10979 case DW_UT_compile: name = "DW_UT_compile"; break;
10980 case DW_UT_type: name = "DW_UT_type"; break;
10981 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10982 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10983 default: gcc_unreachable ();
10984 }
10985 dw2_asm_output_data (1, ut, "%s", name);
10986 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10987 }
10988 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10989 debug_abbrev_section,
10990 "Offset Into Abbrev. Section");
10991 if (dwarf_version < 5)
10992 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10993 }
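/* Editorial sketch (not part of the emitted output): for 32-bit DWARF the
   function above lays the CU header out roughly as

     DWARF 2-4                        DWARF 5
     unit_length          (4 bytes)   unit_length          (4 bytes)
     version              (2 bytes)   version              (2 bytes)
     debug_abbrev_offset  (4 bytes)   unit_type            (1 byte)
     address_size         (1 byte)    address_size         (1 byte)
                                      debug_abbrev_offset  (4 bytes)

   For 64-bit DWARF a 0xffffffff escape precedes an 8-byte unit_length and
   the abbrev offset widens to 8 bytes (DWARF_OFFSET_SIZE).  */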
10994
10995 /* Output the compilation unit DIE and its children. */
10996
10997 static void
10998 output_comp_unit (dw_die_ref die, int output_if_empty,
10999 const unsigned char *dwo_id)
11000 {
11001 const char *secname, *oldsym;
11002 char *tmp;
11003
11004 /* Unless we are outputting the main CU, we may throw away empty ones. */
11005 if (!output_if_empty && die->die_child == NULL)
11006 return;
11007
11008 /* Even if there are no children of this DIE, we must output the information
11009 about the compilation unit. Otherwise, on an empty translation unit, we
11010 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11011 will then complain when examining the file. First mark all the DIEs in
11012 this CU so we know which get local refs. */
11013 mark_dies (die);
11014
11015 external_ref_hash_type *extern_map = optimize_external_refs (die);
11016
11017 /* For now, optimize only the main CU; in order to optimize the rest
11018 we'd need to see all of them earlier. Leave the rest for post-linking
11019 tools like DWZ. */
11020 if (die == comp_unit_die ())
11021 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11022
11023 build_abbrev_table (die, extern_map);
11024
11025 optimize_abbrev_table ();
11026
11027 delete extern_map;
11028
11029 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11030 next_die_offset = (dwo_id
11031 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11032 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11033 calc_die_sizes (die);
11034
11035 oldsym = die->die_id.die_symbol;
11036 if (oldsym && die->comdat_type_p)
11037 {
11038 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11039
11040 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11041 secname = tmp;
11042 die->die_id.die_symbol = NULL;
11043 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11044 }
11045 else
11046 {
11047 switch_to_section (debug_info_section);
11048 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11049 info_section_emitted = true;
11050 }
11051
11052 /* For LTO cross unit DIE refs we want a symbol at the start of the
11053 debug info section, not on the CU DIE. */
11054 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11055 {
11056 /* ??? No way to get visibility assembled without a decl. */
11057 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11058 get_identifier (oldsym), char_type_node);
11059 TREE_PUBLIC (decl) = true;
11060 TREE_STATIC (decl) = true;
11061 DECL_ARTIFICIAL (decl) = true;
11062 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11063 DECL_VISIBILITY_SPECIFIED (decl) = true;
11064 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11065 #ifdef ASM_WEAKEN_LABEL
11066 /* We prefer a .weak because that handles duplicates from duplicate
11067 archive members in a graceful way. */
11068 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11069 #else
11070 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11071 #endif
11072 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11073 }
11074
11075 /* Output debugging information. */
11076 output_compilation_unit_header (dwo_id
11077 ? DW_UT_split_compile : DW_UT_compile);
11078 if (dwarf_version >= 5)
11079 {
11080 if (dwo_id != NULL)
11081 for (int i = 0; i < 8; i++)
11082 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11083 }
11084 output_die (die);
11085
11086 /* Leave the marks on the main CU, so we can check them in
11087 output_pubnames. */
11088 if (oldsym)
11089 {
11090 unmark_dies (die);
11091 die->die_id.die_symbol = oldsym;
11092 }
11093 }
11094
11095 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11096 and .debug_pubtypes. This is configured per-target, but can be
11097 overridden by the -gpubnames or -gno-pubnames options. */
11098
11099 static inline bool
11100 want_pubnames (void)
11101 {
11102 if (debug_info_level <= DINFO_LEVEL_TERSE)
11103 return false;
11104 if (debug_generate_pub_sections != -1)
11105 return debug_generate_pub_sections;
11106 return targetm.want_debug_pub_sections;
11107 }
11108
11109 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11110
11111 static void
11112 add_AT_pubnames (dw_die_ref die)
11113 {
11114 if (want_pubnames ())
11115 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11116 }
11117
11118 /* Add a string attribute value to a skeleton DIE. */
11119
11120 static inline void
11121 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11122 const char *str)
11123 {
11124 dw_attr_node attr;
11125 struct indirect_string_node *node;
11126
11127 if (! skeleton_debug_str_hash)
11128 skeleton_debug_str_hash
11129 = hash_table<indirect_string_hasher>::create_ggc (10);
11130
11131 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11132 find_string_form (node);
11133 if (node->form == dwarf_FORM (DW_FORM_strx))
11134 node->form = DW_FORM_strp;
11135
11136 attr.dw_attr = attr_kind;
11137 attr.dw_attr_val.val_class = dw_val_class_str;
11138 attr.dw_attr_val.val_entry = NULL;
11139 attr.dw_attr_val.v.val_str = node;
11140 add_dwarf_attr (die, &attr);
11141 }
11142
11143 /* Helper function to generate top-level dies for skeleton debug_info and
11144 debug_types. */
11145
11146 static void
11147 add_top_level_skeleton_die_attrs (dw_die_ref die)
11148 {
11149 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11150 const char *comp_dir = comp_dir_string ();
11151
11152 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11153 if (comp_dir != NULL)
11154 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11155 add_AT_pubnames (die);
11156 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11157 }
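/* Editorial sketch of the attributes the helper above leaves on a skeleton
   CU DIE, assuming want_pubnames () returns true and the compilation
   directory is known (the names shown are hypothetical):

     DW_AT_dwo_name      "foo.dwo"            <- aux_base_name + ".dwo"
     DW_AT_comp_dir      "/home/user/build"
     DW_AT_GNU_pubnames  1
     DW_AT_addr_base     debug_addr_section_label

   dwarf_AT selects either the standard DWARF 5 attribute or its GNU
   extension, depending on the DWARF version in use.  */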
11158
11159 /* Output skeleton debug sections that point to the dwo file. */
11160
11161 static void
11162 output_skeleton_debug_sections (dw_die_ref comp_unit,
11163 const unsigned char *dwo_id)
11164 {
11165 /* These attributes will be found in the full debug_info section. */
11166 remove_AT (comp_unit, DW_AT_producer);
11167 remove_AT (comp_unit, DW_AT_language);
11168
11169 switch_to_section (debug_skeleton_info_section);
11170 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11171
11172 /* Produce the skeleton compilation-unit header. This one differs enough from
11173 a normal CU header that it's better not to call
11174 output_compilation_unit_header. */
11175 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11176 dw2_asm_output_data (4, 0xffffffff,
11177 "Initial length escape value indicating 64-bit "
11178 "DWARF extension");
11179
11180 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11181 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11182 - DWARF_INITIAL_LENGTH_SIZE
11183 + size_of_die (comp_unit),
11184 "Length of Compilation Unit Info");
11185 output_dwarf_version ();
11186 if (dwarf_version >= 5)
11187 {
11188 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11189 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11190 }
11191 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11192 debug_skeleton_abbrev_section,
11193 "Offset Into Abbrev. Section");
11194 if (dwarf_version < 5)
11195 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11196 else
11197 for (int i = 0; i < 8; i++)
11198 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11199
11200 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11201 output_die (comp_unit);
11202
11203 /* Build the skeleton debug_abbrev section. */
11204 switch_to_section (debug_skeleton_abbrev_section);
11205 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11206
11207 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11208
11209 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11210 }
11211
11212 /* Output a comdat type unit DIE and its children. */
11213
11214 static void
11215 output_comdat_type_unit (comdat_type_node *node)
11216 {
11217 const char *secname;
11218 char *tmp;
11219 int i;
11220 #if defined (OBJECT_FORMAT_ELF)
11221 tree comdat_key;
11222 #endif
11223
11224 /* First mark all the DIEs in this CU so we know which get local refs. */
11225 mark_dies (node->root_die);
11226
11227 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11228
11229 build_abbrev_table (node->root_die, extern_map);
11230
11231 delete extern_map;
11232 extern_map = NULL;
11233
11234 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11235 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11236 calc_die_sizes (node->root_die);
11237
11238 #if defined (OBJECT_FORMAT_ELF)
11239 if (dwarf_version >= 5)
11240 {
11241 if (!dwarf_split_debug_info)
11242 secname = ".debug_info";
11243 else
11244 secname = ".debug_info.dwo";
11245 }
11246 else if (!dwarf_split_debug_info)
11247 secname = ".debug_types";
11248 else
11249 secname = ".debug_types.dwo";
11250
11251 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11252 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11253 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11254 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11255 comdat_key = get_identifier (tmp);
11256 targetm.asm_out.named_section (secname,
11257 SECTION_DEBUG | SECTION_LINKONCE,
11258 comdat_key);
11259 #else
11260 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11261 sprintf (tmp, (dwarf_version >= 5
11262 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11263 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11264 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11265 secname = tmp;
11266 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11267 #endif
11268
11269 /* Output debugging information. */
11270 output_compilation_unit_header (dwarf_split_debug_info
11271 ? DW_UT_split_type : DW_UT_type);
11272 output_signature (node->signature, "Type Signature");
11273 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11274 "Offset to Type DIE");
11275 output_die (node->root_die);
11276
11277 unmark_dies (node->root_die);
11278 }
11279
11280 /* Return the DWARF2/3 pubname associated with a decl. */
11281
11282 static const char *
11283 dwarf2_name (tree decl, int scope)
11284 {
11285 if (DECL_NAMELESS (decl))
11286 return NULL;
11287 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11288 }
11289
11290 /* Add a new entry to .debug_pubnames if appropriate. */
11291
11292 static void
11293 add_pubname_string (const char *str, dw_die_ref die)
11294 {
11295 pubname_entry e;
11296
11297 e.die = die;
11298 e.name = xstrdup (str);
11299 vec_safe_push (pubname_table, e);
11300 }
11301
11302 static void
11303 add_pubname (tree decl, dw_die_ref die)
11304 {
11305 if (!want_pubnames ())
11306 return;
11307
11308 /* Don't add items to the table when we expect that the consumer will have
11309 just read the enclosing die. For example, if the consumer is looking at a
11310 class_member, it will either be inside the class already, or will have just
11311 looked up the class to find the member. Either way, searching the class is
11312 faster than searching the index. */
11313 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11314 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11315 {
11316 const char *name = dwarf2_name (decl, 1);
11317
11318 if (name)
11319 add_pubname_string (name, die);
11320 }
11321 }
11322
11323 /* Add an enumerator to the pubnames section. */
11324
11325 static void
11326 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11327 {
11328 pubname_entry e;
11329
11330 gcc_assert (scope_name);
11331 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11332 e.die = die;
11333 vec_safe_push (pubname_table, e);
11334 }
11335
11336 /* Add a new entry to .debug_pubtypes if appropriate. */
11337
11338 static void
11339 add_pubtype (tree decl, dw_die_ref die)
11340 {
11341 pubname_entry e;
11342
11343 if (!want_pubnames ())
11344 return;
11345
11346 if ((TREE_PUBLIC (decl)
11347 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11348 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11349 {
11350 tree scope = NULL;
11351 const char *scope_name = "";
11352 const char *sep = is_cxx () ? "::" : ".";
11353 const char *name;
11354
11355 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11356 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11357 {
11358 scope_name = lang_hooks.dwarf_name (scope, 1);
11359 if (scope_name != NULL && scope_name[0] != '\0')
11360 scope_name = concat (scope_name, sep, NULL);
11361 else
11362 scope_name = "";
11363 }
11364
11365 if (TYPE_P (decl))
11366 name = type_tag (decl);
11367 else
11368 name = lang_hooks.dwarf_name (decl, 1);
11369
11370 /* If we don't have a name for the type, there's no point in adding
11371 it to the table. */
11372 if (name != NULL && name[0] != '\0')
11373 {
11374 e.die = die;
11375 e.name = concat (scope_name, name, NULL);
11376 vec_safe_push (pubtype_table, e);
11377 }
11378
11379 /* Although it might be more consistent to add the pubinfo for the
11380 enumerators as their dies are created, they should only be added if the
11381 enum type meets the criteria above. So rather than re-check the parent
11382 enum type whenever an enumerator die is created, just output them all
11383 here. This isn't protected by the name conditional because anonymous
11384 enums don't have names. */
11385 if (die->die_tag == DW_TAG_enumeration_type)
11386 {
11387 dw_die_ref c;
11388
11389 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11390 }
11391 }
11392 }
11393
11394 /* Output a single entry in the pubnames table. */
11395
11396 static void
11397 output_pubname (dw_offset die_offset, pubname_entry *entry)
11398 {
11399 dw_die_ref die = entry->die;
11400 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11401
11402 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11403
11404 if (debug_generate_pub_sections == 2)
11405 {
11406 /* This logic follows gdb's method for determining the value of the flag
11407 byte. */
11408 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11409 switch (die->die_tag)
11410 {
11411 case DW_TAG_typedef:
11412 case DW_TAG_base_type:
11413 case DW_TAG_subrange_type:
11414 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11415 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11416 break;
11417 case DW_TAG_enumerator:
11418 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11419 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11420 if (!is_cxx ())
11421 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11422 break;
11423 case DW_TAG_subprogram:
11424 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11425 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11426 if (!is_ada ())
11427 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11428 break;
11429 case DW_TAG_constant:
11430 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11431 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11432 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11433 break;
11434 case DW_TAG_variable:
11435 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11436 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11437 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11438 break;
11439 case DW_TAG_namespace:
11440 case DW_TAG_imported_declaration:
11441 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11442 break;
11443 case DW_TAG_class_type:
11444 case DW_TAG_interface_type:
11445 case DW_TAG_structure_type:
11446 case DW_TAG_union_type:
11447 case DW_TAG_enumeration_type:
11448 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11449 if (!is_cxx ())
11450 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11451 break;
11452 default:
11453 /* An unusual tag. Leave the flag-byte empty. */
11454 break;
11455 }
11456 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11457 "GDB-index flags");
11458 }
11459
11460 dw2_asm_output_nstring (entry->name, -1, "external name");
11461 }
11462
11463
11464 /* Output the public names table used to speed up access to externally
11465 visible names; or the public types table used to find type definitions. */
11466
11467 static void
11468 output_pubnames (vec<pubname_entry, va_gc> *names)
11469 {
11470 unsigned i;
11471 unsigned long pubnames_length = size_of_pubnames (names);
11472 pubname_entry *pub;
11473
11474 if (!XCOFF_DEBUGGING_INFO)
11475 {
11476 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11477 dw2_asm_output_data (4, 0xffffffff,
11478 "Initial length escape value indicating 64-bit DWARF extension");
11479 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11480 "Pub Info Length");
11481 }
11482
11483 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11484 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11485
11486 if (dwarf_split_debug_info)
11487 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11488 debug_skeleton_info_section,
11489 "Offset of Compilation Unit Info");
11490 else
11491 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11492 debug_info_section,
11493 "Offset of Compilation Unit Info");
11494 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11495 "Compilation Unit Length");
11496
11497 FOR_EACH_VEC_ELT (*names, i, pub)
11498 {
11499 if (include_pubname_in_output (names, pub))
11500 {
11501 dw_offset die_offset = pub->die->die_offset;
11502
11503 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11504 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11505 gcc_assert (pub->die->die_mark);
11506
11507 /* If we're putting types in their own .debug_types sections,
11508 the .debug_pubtypes table will still point to the compile
11509 unit (not the type unit), so we want to use the offset of
11510 the skeleton DIE (if there is one). */
11511 if (pub->die->comdat_type_p && names == pubtype_table)
11512 {
11513 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11514
11515 if (type_node != NULL)
11516 die_offset = (type_node->skeleton_die != NULL
11517 ? type_node->skeleton_die->die_offset
11518 : comp_unit_die ()->die_offset);
11519 }
11520
11521 output_pubname (die_offset, pub);
11522 }
11523 }
11524
11525 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11526 }
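/* Rough shape of one pubnames/pubtypes contribution as emitted above
   (editorial note):

     unit_length, version (2), CU offset, CU length           <- header
     { DIE offset, [1-byte GDB-index flags], "name", 0 }...   <- entries
     0                                                         <- terminator

   The flags byte is present only when debug_generate_pub_sections == 2,
   i.e. when the GDB-index flavored tables are requested.  */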
11527
11528 /* Output public names and types tables if necessary. */
11529
11530 static void
11531 output_pubtables (void)
11532 {
11533 if (!want_pubnames () || !info_section_emitted)
11534 return;
11535
11536 switch_to_section (debug_pubnames_section);
11537 output_pubnames (pubname_table);
11538 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11539 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11540 simply won't look for the section. */
11541 switch_to_section (debug_pubtypes_section);
11542 output_pubnames (pubtype_table);
11543 }
11544
11545
11546 /* Output the information that goes into the .debug_aranges table.
11547 Namely, define the beginning and ending address range of the
11548 text section generated for this compilation unit. */
11549
11550 static void
11551 output_aranges (void)
11552 {
11553 unsigned i;
11554 unsigned long aranges_length = size_of_aranges ();
11555
11556 if (!XCOFF_DEBUGGING_INFO)
11557 {
11558 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11559 dw2_asm_output_data (4, 0xffffffff,
11560 "Initial length escape value indicating 64-bit DWARF extension");
11561 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11562 "Length of Address Ranges Info");
11563 }
11564
11565 /* Version number for aranges is still 2, even up to DWARF5. */
11566 dw2_asm_output_data (2, 2, "DWARF aranges version");
11567 if (dwarf_split_debug_info)
11568 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11569 debug_skeleton_info_section,
11570 "Offset of Compilation Unit Info");
11571 else
11572 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11573 debug_info_section,
11574 "Offset of Compilation Unit Info");
11575 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11576 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11577
11578 /* We need to align to twice the pointer size here. */
11579 if (DWARF_ARANGES_PAD_SIZE)
11580 {
11581 /* Pad using 2-byte words so that the padding is correct for any
11582 pointer size. */
11583 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11584 2 * DWARF2_ADDR_SIZE);
11585 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11586 dw2_asm_output_data (2, 0, NULL);
11587 }
11588
11589 /* It is necessary not to output these entries if the sections were
11590 not used; if the sections were not used, the length will be 0 and
11591 the address may end up as 0 if the section is discarded by ld
11592 --gc-sections, leaving an invalid (0, 0) entry that can be
11593 confused with the terminator. */
11594 if (text_section_used)
11595 {
11596 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11597 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11598 text_section_label, "Length");
11599 }
11600 if (cold_text_section_used)
11601 {
11602 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11603 "Address");
11604 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11605 cold_text_section_label, "Length");
11606 }
11607
11608 if (have_multiple_function_sections)
11609 {
11610 unsigned fde_idx;
11611 dw_fde_ref fde;
11612
11613 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11614 {
11615 if (DECL_IGNORED_P (fde->decl))
11616 continue;
11617 if (!fde->in_std_section)
11618 {
11619 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11620 "Address");
11621 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11622 fde->dw_fde_begin, "Length");
11623 }
11624 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11625 {
11626 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11627 "Address");
11628 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11629 fde->dw_fde_second_begin, "Length");
11630 }
11631 }
11632 }
11633
11634 /* Output the terminator words. */
11635 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11636 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11637 }
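/* Rough shape of the .debug_aranges contribution emitted above
   (editorial note):

     unit_length, version (2), CU offset
     address_size, segment_size (0), padding to 2 * address_size
     { start address, length }...    <- one pair per used section or FDE
     0, 0                            <- terminator

   Each address and length field is DWARF2_ADDR_SIZE bytes wide.  */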
11638
11639 /* Add a new entry to .debug_ranges. Return its index into
11640 ranges_table vector. */
11641
11642 static unsigned int
11643 add_ranges_num (int num, bool maybe_new_sec)
11644 {
11645 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11646 vec_safe_push (ranges_table, r);
11647 return vec_safe_length (ranges_table) - 1;
11648 }
11649
11650 /* Add a new entry to .debug_ranges corresponding to a block, or a
11651 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11652 this entry might be in a different section from previous range. */
11653
11654 static unsigned int
11655 add_ranges (const_tree block, bool maybe_new_sec)
11656 {
11657 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11658 }
11659
11660 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11661 chain, or a middle entry of a chain that will be directly referred to. */
11662
11663 static void
11664 note_rnglist_head (unsigned int offset)
11665 {
11666 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11667 return;
11668 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11669 }
11670
11671 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11672 When using dwarf_split_debug_info, address attributes in dies destined
11673 for the final executable should be direct references--setting the
11674 parameter force_direct ensures this behavior. */
11675
11676 static void
11677 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11678 bool *added, bool force_direct)
11679 {
11680 unsigned int in_use = vec_safe_length (ranges_by_label);
11681 unsigned int offset;
11682 dw_ranges_by_label rbl = { begin, end };
11683 vec_safe_push (ranges_by_label, rbl);
11684 offset = add_ranges_num (-(int)in_use - 1, true);
11685 if (!*added)
11686 {
11687 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11688 *added = true;
11689 note_rnglist_head (offset);
11690 }
11691 }
11692
11693 /* Emit .debug_ranges section. */
11694
11695 static void
11696 output_ranges (void)
11697 {
11698 unsigned i;
11699 static const char *const start_fmt = "Offset %#x";
11700 const char *fmt = start_fmt;
11701 dw_ranges *r;
11702
11703 switch_to_section (debug_ranges_section);
11704 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11705 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11706 {
11707 int block_num = r->num;
11708
11709 if (block_num > 0)
11710 {
11711 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11712 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11713
11714 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11715 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11716
11717 /* If all code is in the text section, then the compilation
11718 unit base address defaults to DW_AT_low_pc, which is the
11719 base of the text section. */
11720 if (!have_multiple_function_sections)
11721 {
11722 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11723 text_section_label,
11724 fmt, i * 2 * DWARF2_ADDR_SIZE);
11725 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11726 text_section_label, NULL);
11727 }
11728
11729 /* Otherwise, the compilation unit base address is zero,
11730 which allows us to use absolute addresses, and not worry
11731 about whether the target supports cross-section
11732 arithmetic. */
11733 else
11734 {
11735 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11736 fmt, i * 2 * DWARF2_ADDR_SIZE);
11737 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11738 }
11739
11740 fmt = NULL;
11741 }
11742
11743 /* Negative block_num stands for an index into ranges_by_label. */
11744 else if (block_num < 0)
11745 {
11746 int lab_idx = - block_num - 1;
11747
11748 if (!have_multiple_function_sections)
11749 {
11750 gcc_unreachable ();
11751 #if 0
11752 /* If we ever use add_ranges_by_labels () for a single
11753 function section, all we have to do is to take out
11754 the #if 0 above. */
11755 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11756 (*ranges_by_label)[lab_idx].begin,
11757 text_section_label,
11758 fmt, i * 2 * DWARF2_ADDR_SIZE);
11759 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11760 (*ranges_by_label)[lab_idx].end,
11761 text_section_label, NULL);
11762 #endif
11763 }
11764 else
11765 {
11766 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11767 (*ranges_by_label)[lab_idx].begin,
11768 fmt, i * 2 * DWARF2_ADDR_SIZE);
11769 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11770 (*ranges_by_label)[lab_idx].end,
11771 NULL);
11772 }
11773 }
11774 else
11775 {
11776 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11777 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11778 fmt = start_fmt;
11779 }
11780 }
11781 }
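/* Editorial sketch of one range list as emitted above, for a hypothetical
   block bounded by .LBB4/.LBE4 when everything lives in the single text
   section (the labels are illustrative only):

     .LBB4 - .Ltext0     <- begin, relative to the CU base (DW_AT_low_pc)
     .LBE4 - .Ltext0     <- end
     0, 0                <- end-of-list terminator

   With multiple function sections, absolute addresses are emitted instead
   and the CU base address is taken to be zero.  */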
11782
11783 /* Non-zero if .debug_line_str should be used for .debug_line section
11784 strings or strings that are likely shareable with those. */
11785 #define DWARF5_USE_DEBUG_LINE_STR \
11786 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11787 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11788 /* FIXME: there is no .debug_line_str.dwo section, \
11789 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11790 && !dwarf_split_debug_info)
11791
11792 /* Assign .debug_rnglists indexes. */
11793
11794 static void
11795 index_rnglists (void)
11796 {
11797 unsigned i;
11798 dw_ranges *r;
11799
11800 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11801 if (r->label)
11802 r->idx = rnglist_idx++;
11803 }
11804
11805 /* Emit .debug_rnglists section. */
11806
11807 static void
11808 output_rnglists (unsigned generation)
11809 {
11810 unsigned i;
11811 dw_ranges *r;
11812 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11813 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11814 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11815
11816 switch_to_section (debug_ranges_section);
11817 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11818 /* There are up to 4 unique ranges labels per generation.
11819 See also init_sections_and_labels. */
11820 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11821 2 + generation * 4);
11822 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11823 3 + generation * 4);
11824 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11825 dw2_asm_output_data (4, 0xffffffff,
11826 "Initial length escape value indicating "
11827 "64-bit DWARF extension");
11828 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11829 "Length of Range Lists");
11830 ASM_OUTPUT_LABEL (asm_out_file, l1);
11831 output_dwarf_version ();
11832 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11833 dw2_asm_output_data (1, 0, "Segment Size");
11834 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11835 about relocation sizes and primarily care about the size of .debug*
11836 sections in linked shared libraries and executables, then
11837 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11838 into it are usually larger than just DW_FORM_sec_offset offsets
11839 into the .debug_rnglists section. */
11840 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11841 "Offset Entry Count");
11842 if (dwarf_split_debug_info)
11843 {
11844 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11845 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11846 if (r->label)
11847 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11848 ranges_base_label, NULL);
11849 }
11850
11851 const char *lab = "";
11852 unsigned int len = vec_safe_length (ranges_table);
11853 const char *base = NULL;
11854 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11855 {
11856 int block_num = r->num;
11857
11858 if (r->label)
11859 {
11860 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11861 lab = r->label;
11862 }
11863 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11864 base = NULL;
11865 if (block_num > 0)
11866 {
11867 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11868 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11869
11870 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11871 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11872
11873 if (HAVE_AS_LEB128)
11874 {
11875 /* If all code is in the text section, then the compilation
11876 unit base address defaults to DW_AT_low_pc, which is the
11877 base of the text section. */
11878 if (!have_multiple_function_sections)
11879 {
11880 dw2_asm_output_data (1, DW_RLE_offset_pair,
11881 "DW_RLE_offset_pair (%s)", lab);
11882 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11883 "Range begin address (%s)", lab);
11884 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11885 "Range end address (%s)", lab);
11886 continue;
11887 }
11888 if (base == NULL)
11889 {
11890 dw_ranges *r2 = NULL;
11891 if (i < len - 1)
11892 r2 = &(*ranges_table)[i + 1];
11893 if (r2
11894 && r2->num != 0
11895 && r2->label == NULL
11896 && !r2->maybe_new_sec)
11897 {
11898 dw2_asm_output_data (1, DW_RLE_base_address,
11899 "DW_RLE_base_address (%s)", lab);
11900 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11901 "Base address (%s)", lab);
11902 strcpy (basebuf, blabel);
11903 base = basebuf;
11904 }
11905 }
11906 if (base)
11907 {
11908 dw2_asm_output_data (1, DW_RLE_offset_pair,
11909 "DW_RLE_offset_pair (%s)", lab);
11910 dw2_asm_output_delta_uleb128 (blabel, base,
11911 "Range begin address (%s)", lab);
11912 dw2_asm_output_delta_uleb128 (elabel, base,
11913 "Range end address (%s)", lab);
11914 continue;
11915 }
11916 dw2_asm_output_data (1, DW_RLE_start_length,
11917 "DW_RLE_start_length (%s)", lab);
11918 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11919 "Range begin address (%s)", lab);
11920 dw2_asm_output_delta_uleb128 (elabel, blabel,
11921 "Range length (%s)", lab);
11922 }
11923 else
11924 {
11925 dw2_asm_output_data (1, DW_RLE_start_end,
11926 "DW_RLE_start_end (%s)", lab);
11927 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11928 "Range begin address (%s)", lab);
11929 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11930 "Range end address (%s)", lab);
11931 }
11932 }
11933
11934 /* Negative block_num stands for an index into ranges_by_label. */
11935 else if (block_num < 0)
11936 {
11937 int lab_idx = - block_num - 1;
11938 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11939 const char *elabel = (*ranges_by_label)[lab_idx].end;
11940
11941 if (!have_multiple_function_sections)
11942 gcc_unreachable ();
11943 if (HAVE_AS_LEB128)
11944 {
11945 dw2_asm_output_data (1, DW_RLE_start_length,
11946 "DW_RLE_start_length (%s)", lab);
11947 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11948 "Range begin address (%s)", lab);
11949 dw2_asm_output_delta_uleb128 (elabel, blabel,
11950 "Range length (%s)", lab);
11951 }
11952 else
11953 {
11954 dw2_asm_output_data (1, DW_RLE_start_end,
11955 "DW_RLE_start_end (%s)", lab);
11956 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11957 "Range begin address (%s)", lab);
11958 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11959 "Range end address (%s)", lab);
11960 }
11961 }
11962 else
11963 dw2_asm_output_data (1, DW_RLE_end_of_list,
11964 "DW_RLE_end_of_list (%s)", lab);
11965 }
11966 ASM_OUTPUT_LABEL (asm_out_file, l2);
11967 }
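/* Editorial summary of the DW_RLE_* encodings chosen above:
   DW_RLE_offset_pair emits two ULEB128 offsets relative to a base address
   (the text section start, or one set earlier by DW_RLE_base_address);
   DW_RLE_start_length emits an address plus a ULEB128 length;
   DW_RLE_start_end emits two absolute addresses; DW_RLE_end_of_list
   terminates each list.  The ULEB128-based forms are only used when the
   assembler can emit a ULEB128 label difference (HAVE_AS_LEB128).  */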
11968
11969 /* Data structure containing information about input files. */
11970 struct file_info
11971 {
11972 const char *path; /* Complete file name. */
11973 const char *fname; /* File name part. */
11974 int length; /* Length of entire string. */
11975 struct dwarf_file_data * file_idx; /* Index in input file table. */
11976 int dir_idx; /* Index in directory table. */
11977 };
11978
11979 /* Data structure containing information about directories with source
11980 files. */
11981 struct dir_info
11982 {
11983 const char *path; /* Path including directory name. */
11984 int length; /* Path length. */
11985 int prefix; /* Index of directory entry which is a prefix. */
11986 int count; /* Number of files in this directory. */
11987 int dir_idx; /* Index of directory used as base. */
11988 };
11989
11990 /* Callback function for file_info comparison. We sort by looking at
11991 the directories in the path. */
11992
11993 static int
11994 file_info_cmp (const void *p1, const void *p2)
11995 {
11996 const struct file_info *const s1 = (const struct file_info *) p1;
11997 const struct file_info *const s2 = (const struct file_info *) p2;
11998 const unsigned char *cp1;
11999 const unsigned char *cp2;
12000
12001 /* Take care of file names without directories. We need to make sure that
12002 we return consistent values to qsort since some implementations will get
12003 confused if we return the same value when identical operands are passed
12004 in opposite orders. So if neither has a directory, return 0; otherwise
12005 return 1 or -1 depending on which one has the directory. We want the one
12006 with the directory to sort after the one without, so all files without a
12007 directory are at the start (normally only the compilation unit file). */
12008 if ((s1->path == s1->fname || s2->path == s2->fname))
12009 return (s2->path == s2->fname) - (s1->path == s1->fname);
12010
12011 cp1 = (const unsigned char *) s1->path;
12012 cp2 = (const unsigned char *) s2->path;
12013
12014 while (1)
12015 {
12016 ++cp1;
12017 ++cp2;
12018 /* Reached the end of the first path? If so, handle like above,
12019 but now we want longer directory prefixes before shorter ones. */
12020 if ((cp1 == (const unsigned char *) s1->fname)
12021 || (cp2 == (const unsigned char *) s2->fname))
12022 return ((cp1 == (const unsigned char *) s1->fname)
12023 - (cp2 == (const unsigned char *) s2->fname));
12024
12025 /* Character of current path component the same? */
12026 else if (*cp1 != *cp2)
12027 return *cp1 - *cp2;
12028 }
12029 }
12030
12031 struct file_name_acquire_data
12032 {
12033 struct file_info *files;
12034 int used_files;
12035 int max_files;
12036 };
12037
12038 /* Traversal function for the hash table. */
12039
12040 int
12041 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12042 {
12043 struct dwarf_file_data *d = *slot;
12044 struct file_info *fi;
12045 const char *f;
12046
12047 gcc_assert (fnad->max_files >= d->emitted_number);
12048
12049 if (! d->emitted_number)
12050 return 1;
12051
12052 gcc_assert (fnad->max_files != fnad->used_files);
12053
12054 fi = fnad->files + fnad->used_files++;
12055
12056 /* Skip all leading "./". */
12057 f = d->filename;
12058 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12059 f += 2;
12060
12061 /* Create a new array entry. */
12062 fi->path = f;
12063 fi->length = strlen (f);
12064 fi->file_idx = d;
12065
12066 /* Search for the file name part. */
12067 f = strrchr (f, DIR_SEPARATOR);
12068 #if defined (DIR_SEPARATOR_2)
12069 {
12070 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12071
12072 if (g != NULL)
12073 {
12074 if (f == NULL || f < g)
12075 f = g;
12076 }
12077 }
12078 #endif
12079
12080 fi->fname = f == NULL ? fi->path : f + 1;
12081 return 1;
12082 }
12083
12084 /* Helper function for output_file_names. Emit a FORM encoded
12085 string STR, with assembly comment start ENTRY_KIND and
12086 index IDX. */
12087
12088 static void
12089 output_line_string (enum dwarf_form form, const char *str,
12090 const char *entry_kind, unsigned int idx)
12091 {
12092 switch (form)
12093 {
12094 case DW_FORM_string:
12095 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12096 break;
12097 case DW_FORM_line_strp:
12098 if (!debug_line_str_hash)
12099 debug_line_str_hash
12100 = hash_table<indirect_string_hasher>::create_ggc (10);
12101
12102 struct indirect_string_node *node;
12103 node = find_AT_string_in_table (str, debug_line_str_hash);
12104 set_indirect_string (node);
12105 node->form = form;
12106 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12107 debug_line_str_section, "%s: %#x: \"%s\"",
12108 entry_kind, 0, node->str);
12109 break;
12110 default:
12111 gcc_unreachable ();
12112 }
12113 }
12114
12115 /* Output the directory table and the file name table. We try to minimize
12116 the total amount of memory needed. A heuristic is used to avoid large
12117 slowdowns with many input files. */
12118
12119 static void
12120 output_file_names (void)
12121 {
12122 struct file_name_acquire_data fnad;
12123 int numfiles;
12124 struct file_info *files;
12125 struct dir_info *dirs;
12126 int *saved;
12127 int *savehere;
12128 int *backmap;
12129 int ndirs;
12130 int idx_offset;
12131 int i;
12132
12133 if (!last_emitted_file)
12134 {
12135 if (dwarf_version >= 5)
12136 {
12137 dw2_asm_output_data (1, 0, "Directory entry format count");
12138 dw2_asm_output_data_uleb128 (0, "Directories count");
12139 dw2_asm_output_data (1, 0, "File name entry format count");
12140 dw2_asm_output_data_uleb128 (0, "File names count");
12141 }
12142 else
12143 {
12144 dw2_asm_output_data (1, 0, "End directory table");
12145 dw2_asm_output_data (1, 0, "End file name table");
12146 }
12147 return;
12148 }
12149
12150 numfiles = last_emitted_file->emitted_number;
12151
12152 /* Allocate the various arrays we need. */
12153 files = XALLOCAVEC (struct file_info, numfiles);
12154 dirs = XALLOCAVEC (struct dir_info, numfiles);
12155
12156 fnad.files = files;
12157 fnad.used_files = 0;
12158 fnad.max_files = numfiles;
12159 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12160 gcc_assert (fnad.used_files == fnad.max_files);
12161
12162 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12163
12164 /* Find all the different directories used. */
12165 dirs[0].path = files[0].path;
12166 dirs[0].length = files[0].fname - files[0].path;
12167 dirs[0].prefix = -1;
12168 dirs[0].count = 1;
12169 dirs[0].dir_idx = 0;
12170 files[0].dir_idx = 0;
12171 ndirs = 1;
12172
12173 for (i = 1; i < numfiles; i++)
12174 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12175 && memcmp (dirs[ndirs - 1].path, files[i].path,
12176 dirs[ndirs - 1].length) == 0)
12177 {
12178 /* Same directory as last entry. */
12179 files[i].dir_idx = ndirs - 1;
12180 ++dirs[ndirs - 1].count;
12181 }
12182 else
12183 {
12184 int j;
12185
12186 /* This is a new directory. */
12187 dirs[ndirs].path = files[i].path;
12188 dirs[ndirs].length = files[i].fname - files[i].path;
12189 dirs[ndirs].count = 1;
12190 dirs[ndirs].dir_idx = ndirs;
12191 files[i].dir_idx = ndirs;
12192
12193 /* Search for a prefix. */
12194 dirs[ndirs].prefix = -1;
12195 for (j = 0; j < ndirs; j++)
12196 if (dirs[j].length < dirs[ndirs].length
12197 && dirs[j].length > 1
12198 && (dirs[ndirs].prefix == -1
12199 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12200 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12201 dirs[ndirs].prefix = j;
12202
12203 ++ndirs;
12204 }
12205
12206 /* Now to the actual work. We have to find a subset of the directories which
12207 allows expressing the file names using references to the directory table
12208 with the fewest characters. We do not do an exhaustive search
12209 where we would have to check every combination of every single
12210 possible prefix. Instead we use a heuristic which provides nearly optimal
12211 results in most cases and is never far off. */
12212 saved = XALLOCAVEC (int, ndirs);
12213 savehere = XALLOCAVEC (int, ndirs);
12214
12215 memset (saved, '\0', ndirs * sizeof (saved[0]));
12216 for (i = 0; i < ndirs; i++)
12217 {
12218 int j;
12219 int total;
12220
12221 /* We can always save some space for the current directory. But this
12222 does not mean it will be enough to justify adding the directory. */
12223 savehere[i] = dirs[i].length;
12224 total = (savehere[i] - saved[i]) * dirs[i].count;
12225
12226 for (j = i + 1; j < ndirs; j++)
12227 {
12228 savehere[j] = 0;
12229 if (saved[j] < dirs[i].length)
12230 {
12231 /* Determine whether the dirs[i] path is a prefix of the
12232 dirs[j] path. */
12233 int k;
12234
12235 k = dirs[j].prefix;
12236 while (k != -1 && k != (int) i)
12237 k = dirs[k].prefix;
12238
12239 if (k == (int) i)
12240 {
12241 /* Yes it is. We can possibly save some memory by
12242 writing the filenames in dirs[j] relative to
12243 dirs[i]. */
12244 savehere[j] = dirs[i].length;
12245 total += (savehere[j] - saved[j]) * dirs[j].count;
12246 }
12247 }
12248 }
12249
12250 /* Check whether we can save enough to justify adding the dirs[i]
12251 directory. */
12252 if (total > dirs[i].length + 1)
12253 {
12254 /* It's worthwhile adding. */
12255 for (j = i; j < ndirs; j++)
12256 if (savehere[j] > 0)
12257 {
12258 /* Remember how much we saved for this directory so far. */
12259 saved[j] = savehere[j];
12260
12261 /* Remember the prefix directory. */
12262 dirs[j].dir_idx = i;
12263 }
12264 }
12265 }
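/* Worked example of the heuristic above, for a hypothetical directory
   table holding dirs[0] = "src/" (length 4, 2 files) and
   dirs[1] = "src/util/" (length 9, 3 files, prefix 0).  For i = 0,
   savehere is 4 for both directories and total = 4*2 + 4*3 = 20, which
   exceeds 4 + 1, so both record saved = 4 and use "src/" as their base.
   For i = 1, total = (9 - 4) * 3 = 15, which exceeds 9 + 1, so the three
   files in "src/util/" switch to it as their base, each now saving 9
   characters instead of 4.  */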
12266
12267 /* Emit the directory name table. */
12268 idx_offset = dirs[0].length > 0 ? 1 : 0;
12269 enum dwarf_form str_form = DW_FORM_string;
12270 enum dwarf_form idx_form = DW_FORM_udata;
12271 if (dwarf_version >= 5)
12272 {
12273 const char *comp_dir = comp_dir_string ();
12274 if (comp_dir == NULL)
12275 comp_dir = "";
12276 dw2_asm_output_data (1, 1, "Directory entry format count");
12277 if (DWARF5_USE_DEBUG_LINE_STR)
12278 str_form = DW_FORM_line_strp;
12279 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12280 dw2_asm_output_data_uleb128 (str_form, "%s",
12281 get_DW_FORM_name (str_form));
12282 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12283 if (str_form == DW_FORM_string)
12284 {
12285 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12286 for (i = 1 - idx_offset; i < ndirs; i++)
12287 dw2_asm_output_nstring (dirs[i].path,
12288 dirs[i].length
12289 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12290 "Directory Entry: %#x", i + idx_offset);
12291 }
12292 else
12293 {
12294 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12295 for (i = 1 - idx_offset; i < ndirs; i++)
12296 {
12297 const char *str
12298 = ggc_alloc_string (dirs[i].path,
12299 dirs[i].length
12300 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12301 output_line_string (str_form, str, "Directory Entry",
12302 (unsigned) i + idx_offset);
12303 }
12304 }
12305 }
12306 else
12307 {
12308 for (i = 1 - idx_offset; i < ndirs; i++)
12309 dw2_asm_output_nstring (dirs[i].path,
12310 dirs[i].length
12311 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12312 "Directory Entry: %#x", i + idx_offset);
12313
12314 dw2_asm_output_data (1, 0, "End directory table");
12315 }
12316
12317 /* We have to emit them in the order of emitted_number since that's
12318 used in the debug info generation. To do this efficiently we
12319 generate a back-mapping of the indices first. */
12320 backmap = XALLOCAVEC (int, numfiles);
12321 for (i = 0; i < numfiles; i++)
12322 backmap[files[i].file_idx->emitted_number - 1] = i;
12323
12324 if (dwarf_version >= 5)
12325 {
12326 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12327 if (filename0 == NULL)
12328 filename0 = "";
12329 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12330 DW_FORM_data2. Choose one based on the number of directories
12331 and how much space they would occupy in each encoding.
12332 If we have at most 256 directories, all indexes fit into
12333 a single byte, so DW_FORM_data1 is most compact (if there
12334 are at most 128 directories, DW_FORM_udata would be just as
12335 compact, but no shorter and slower to decode). */
12336 if (ndirs + idx_offset <= 256)
12337 idx_form = DW_FORM_data1;
12338 /* If there are more than 65536 directories, we have to use
12339 DW_FORM_udata, as DW_FORM_data2 can't refer to them.
12340 Otherwise, compute how much space the indexes would occupy if they
12341 all used DW_FORM_udata - sum - and compare that to the size of the
12342 DW_FORM_data2 encoding, and pick the more efficient one. */
12343 else if (ndirs + idx_offset <= 65536)
12344 {
12345 unsigned HOST_WIDE_INT sum = 1;
12346 for (i = 0; i < numfiles; i++)
12347 {
12348 int file_idx = backmap[i];
12349 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12350 sum += size_of_uleb128 (dir_idx);
12351 }
12352 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12353 idx_form = DW_FORM_data2;
12354 }
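/* Hypothetical example of the comparison above: with 300 directories and
   10 files whose directory indexes all fit in one ULEB128 byte,
   sum = 1 + 10 * 1 = 11, versus 2 * (10 + 1) = 22 bytes for
   DW_FORM_data2, so DW_FORM_udata is kept.  Since a ULEB128 index only
   needs a third byte once it exceeds 16383, DW_FORM_data2 can win only
   when many directory indexes are that large.  */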
12355 #ifdef VMS_DEBUGGING_INFO
12356 dw2_asm_output_data (1, 4, "File name entry format count");
12357 #else
12358 dw2_asm_output_data (1, 2, "File name entry format count");
12359 #endif
12360 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12361 dw2_asm_output_data_uleb128 (str_form, "%s",
12362 get_DW_FORM_name (str_form));
12363 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12364 "DW_LNCT_directory_index");
12365 dw2_asm_output_data_uleb128 (idx_form, "%s",
12366 get_DW_FORM_name (idx_form));
12367 #ifdef VMS_DEBUGGING_INFO
12368 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12369 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12370 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12371 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12372 #endif
12373 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12374
12375 output_line_string (str_form, filename0, "File Entry", 0);
12376
12377 /* Include directory index. */
12378 if (idx_form != DW_FORM_udata)
12379 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12380 0, NULL);
12381 else
12382 dw2_asm_output_data_uleb128 (0, NULL);
12383
12384 #ifdef VMS_DEBUGGING_INFO
12385 dw2_asm_output_data_uleb128 (0, NULL);
12386 dw2_asm_output_data_uleb128 (0, NULL);
12387 #endif
12388 }
12389
12390 /* Now write all the file names. */
12391 for (i = 0; i < numfiles; i++)
12392 {
12393 int file_idx = backmap[i];
12394 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12395
12396 #ifdef VMS_DEBUGGING_INFO
12397 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12398
12399 /* Setting these fields can lead to debugger miscomparisons,
12400 but VMS Debug requires them to be set correctly. */
12401
12402 int ver;
12403 long long cdt;
12404 long siz;
12405 int maxfilelen = (strlen (files[file_idx].path)
12406 + dirs[dir_idx].length
12407 + MAX_VMS_VERSION_LEN + 1);
12408 char *filebuf = XALLOCAVEC (char, maxfilelen);
12409
12410 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12411 snprintf (filebuf, maxfilelen, "%s;%d",
12412 files[file_idx].path + dirs[dir_idx].length, ver);
12413
12414 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12415
12416 /* Include directory index. */
12417 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12418 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12419 dir_idx + idx_offset, NULL);
12420 else
12421 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12422
12423 /* Modification time. */
12424 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12425 &cdt, 0, 0, 0) == 0)
12426 ? cdt : 0, NULL);
12427
12428 /* File length in bytes. */
12429 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12430 0, &siz, 0, 0) == 0)
12431 ? siz : 0, NULL);
12432 #else
12433 output_line_string (str_form,
12434 files[file_idx].path + dirs[dir_idx].length,
12435 "File Entry", (unsigned) i + 1);
12436
12437 /* Include directory index. */
12438 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12439 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12440 dir_idx + idx_offset, NULL);
12441 else
12442 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12443
12444 if (dwarf_version >= 5)
12445 continue;
12446
12447 /* Modification time. */
12448 dw2_asm_output_data_uleb128 (0, NULL);
12449
12450 /* File length in bytes. */
12451 dw2_asm_output_data_uleb128 (0, NULL);
12452 #endif /* VMS_DEBUGGING_INFO */
12453 }
12454
12455 if (dwarf_version < 5)
12456 dw2_asm_output_data (1, 0, "End file name table");
12457 }
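
/* The file table above (and the line-number programs below) encode most
   variable-width fields as ULEB128 via size_of_uleb128 and
   dw2_asm_output_data_uleb128.  For reference, this is a minimal,
   self-contained sketch of the unsigned encoding; the helper name is
   hypothetical and it is not used by the emitter.  */

static unsigned int
sketch_encode_uleb128 (unsigned long long value, unsigned char *buf)
{
  unsigned int len = 0;

  do
    {
      unsigned char byte = value & 0x7f;

      value >>= 7;
      if (value != 0)
        byte |= 0x80;		/* More bytes follow.  */
      buf[len++] = byte;
    }
  while (value != 0);

  return len;			/* Agrees with size_of_uleb128 for the original value.  */
}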
12458
12459
12460 /* Output one line number table into the .debug_line section. */
12461
12462 static void
12463 output_one_line_info_table (dw_line_info_table *table)
12464 {
12465 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12466 unsigned int current_line = 1;
12467 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12468 dw_line_info_entry *ent, *prev_addr;
12469 size_t i;
12470 unsigned int view;
12471
12472 view = 0;
12473
12474 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12475 {
12476 switch (ent->opcode)
12477 {
12478 case LI_set_address:
12479 /* ??? Unfortunately, we have little choice here currently, and
12480 must always use the most general form. GCC does not know the
12481 address delta itself, so we can't use DW_LNS_advance_pc. Many
12482 ports do have length attributes which will give an upper bound
12483 on the address range. We could perhaps use length attributes
12484 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12485 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12486
12487 view = 0;
12488
12489 /* This can handle any delta. This takes
12490 4+DWARF2_ADDR_SIZE bytes. */
12491 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12492 debug_variable_location_views
12493 ? ", reset view to 0" : "");
12494 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12495 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12496 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12497
12498 prev_addr = ent;
12499 break;
12500
12501 case LI_adv_address:
12502 {
12503 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12504 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12505 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12506
12507 view++;
12508
12509 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12510 dw2_asm_output_delta (2, line_label, prev_label,
12511 "from %s to %s", prev_label, line_label);
12512
12513 prev_addr = ent;
12514 break;
12515 }
12516
12517 case LI_set_line:
12518 if (ent->val == current_line)
12519 {
12520 /* We still need to start a new row, so output a copy insn. */
12521 dw2_asm_output_data (1, DW_LNS_copy,
12522 "copy line %u", current_line);
12523 }
12524 else
12525 {
12526 int line_offset = ent->val - current_line;
12527 int line_delta = line_offset - DWARF_LINE_BASE;
12528
12529 current_line = ent->val;
12530 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12531 {
12532 /* This can handle deltas from -10 to 234, using the current
12533 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12534 This takes 1 byte. */
12535 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12536 "line %u", current_line);
12537 }
12538 else
12539 {
12540 /* This can handle any delta. This takes at least 4 bytes,
12541 depending on the value being encoded. */
12542 dw2_asm_output_data (1, DW_LNS_advance_line,
12543 "advance to line %u", current_line);
12544 dw2_asm_output_data_sleb128 (line_offset, NULL);
12545 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12546 }
12547 }
12548 break;
12549
12550 case LI_set_file:
12551 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12552 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12553 break;
12554
12555 case LI_set_column:
12556 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12557 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12558 break;
12559
12560 case LI_negate_stmt:
12561 current_is_stmt = !current_is_stmt;
12562 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12563 "is_stmt %d", current_is_stmt);
12564 break;
12565
12566 case LI_set_prologue_end:
12567 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12568 "set prologue end");
12569 break;
12570
12571 case LI_set_epilogue_begin:
12572 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12573 "set epilogue begin");
12574 break;
12575
12576 case LI_set_discriminator:
12577 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12578 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12579 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12580 dw2_asm_output_data_uleb128 (ent->val, NULL);
12581 break;
12582 }
12583 }
12584
12585 /* Emit debug info for the address of the end of the table. */
12586 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12587 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12588 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12589 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12590
12591 dw2_asm_output_data (1, 0, "end sequence");
12592 dw2_asm_output_data_uleb128 (1, NULL);
12593 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12594 }
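
/* For reference: how a DWARF consumer decodes one of the special opcodes
   emitted above (DWARF_LINE_OPCODE_BASE + line_delta).  A minimal sketch that
   ignores the DWARF 4 maximum-operations-per-instruction field; the helper is
   hypothetical and unused.  Because this emitter advances the address only
   through DW_LNE_set_address and DW_LNS_fixed_advance_pc, the decoded address
   advance is always zero for the special opcodes it produces.  */

static void
sketch_decode_special_opcode (unsigned int opcode, unsigned int opcode_base,
                              int line_base, unsigned int line_range,
                              unsigned int *addr_advance, int *line_advance)
{
  unsigned int adjusted = opcode - opcode_base;

  /* Times minimum_instruction_length gives the advance in bytes.  */
  *addr_advance = adjusted / line_range;
  *line_advance = line_base + (int) (adjusted % line_range);
}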
12595
12596 /* Output the source line number correspondence information. This
12597 information goes into the .debug_line section. */
12598
12599 static void
12600 output_line_info (bool prologue_only)
12601 {
12602 static unsigned int generation;
12603 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12604 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12605 bool saw_one = false;
12606 int opc;
12607
12608 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12609 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12610 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12611 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12612
12613 if (!XCOFF_DEBUGGING_INFO)
12614 {
12615 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12616 dw2_asm_output_data (4, 0xffffffff,
12617 "Initial length escape value indicating 64-bit DWARF extension");
12618 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12619 "Length of Source Line Info");
12620 }
12621
12622 ASM_OUTPUT_LABEL (asm_out_file, l1);
12623
12624 output_dwarf_version ();
12625 if (dwarf_version >= 5)
12626 {
12627 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12628 dw2_asm_output_data (1, 0, "Segment Size");
12629 }
12630 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12631 ASM_OUTPUT_LABEL (asm_out_file, p1);
12632
12633 /* Define the architecture-dependent minimum instruction length (in bytes).
12634 In this implementation of DWARF, this field is used for information
12635 purposes only. Since GCC generates assembly language, we have no
12636 a priori knowledge of how many instruction bytes are generated for each
12637 source line, and therefore can use only the DW_LNE_set_address and
12638 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12639 this as '1', which is "correct enough" for all architectures,
12640 and don't let the target override. */
12641 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12642
12643 if (dwarf_version >= 4)
12644 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12645 "Maximum Operations Per Instruction");
12646 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12647 "Default is_stmt_start flag");
12648 dw2_asm_output_data (1, DWARF_LINE_BASE,
12649 "Line Base Value (Special Opcodes)");
12650 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12651 "Line Range Value (Special Opcodes)");
12652 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12653 "Special Opcode Base");
12654
12655 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12656 {
12657 int n_op_args;
12658 switch (opc)
12659 {
12660 case DW_LNS_advance_pc:
12661 case DW_LNS_advance_line:
12662 case DW_LNS_set_file:
12663 case DW_LNS_set_column:
12664 case DW_LNS_fixed_advance_pc:
12665 case DW_LNS_set_isa:
12666 n_op_args = 1;
12667 break;
12668 default:
12669 n_op_args = 0;
12670 break;
12671 }
12672
12673 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12674 opc, n_op_args);
12675 }
12676
12677 /* Write out the information about the files we use. */
12678 output_file_names ();
12679 ASM_OUTPUT_LABEL (asm_out_file, p2);
12680 if (prologue_only)
12681 {
12682 /* Output the marker for the end of the line number info. */
12683 ASM_OUTPUT_LABEL (asm_out_file, l2);
12684 return;
12685 }
12686
12687 if (separate_line_info)
12688 {
12689 dw_line_info_table *table;
12690 size_t i;
12691
12692 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12693 if (table->in_use)
12694 {
12695 output_one_line_info_table (table);
12696 saw_one = true;
12697 }
12698 }
12699 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12700 {
12701 output_one_line_info_table (cold_text_section_line_info);
12702 saw_one = true;
12703 }
12704
12705 /* ??? Some Darwin linkers crash on a .debug_line section with no
12706 sequences. Further, merely a DW_LNE_end_sequence entry is not
12707 sufficient -- the address column must also be initialized.
12708 Make sure to output at least one set_address/end_sequence pair,
12709 choosing .text since that section is always present. */
12710 if (text_section_line_info->in_use || !saw_one)
12711 output_one_line_info_table (text_section_line_info);
12712
12713 /* Output the marker for the end of the line number info. */
12714 ASM_OUTPUT_LABEL (asm_out_file, l2);
12715 }
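
/* A rough, illustrative view of the .debug_line unit header that
   output_line_info lays out for DWARF 2-4 (DWARF 5 additionally emits
   address_size and segment_selector_size right after the version, as above).
   This struct only documents the on-disk field order: the real data is
   unaligned, the initial length may use the 64-bit escape, and the
   maximum_operations_per_instruction byte is present only for DWARF 4+.  */

struct sketch_debug_line_header
{
  unsigned int unit_length;		/* l2 - l1, 32-bit form.  */
  unsigned short version;
  unsigned int header_length;		/* p2 - p1 ("Prolog Length").  */
  unsigned char minimum_instruction_length;	/* Always 1 here.  */
  unsigned char maximum_operations_per_instruction;
  unsigned char default_is_stmt;
  signed char line_base;
  unsigned char line_range;
  unsigned char opcode_base;
  /* Followed by opcode_base - 1 standard opcode argument counts, then the
     include directory and file name tables written by output_file_names.  */
};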
12716 \f
12717 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12718
12719 static inline bool
12720 need_endianity_attribute_p (bool reverse)
12721 {
12722 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12723 }
12724
12725 /* Given a pointer to a tree node for some base type, return a pointer to
12726 a DIE that describes the given type. REVERSE is true if the type is
12727 to be interpreted in the reverse storage order wrt the target order.
12728
12729 This routine must only be called for GCC type nodes that correspond to
12730 Dwarf base (fundamental) types. */
12731
12732 static dw_die_ref
12733 base_type_die (tree type, bool reverse)
12734 {
12735 dw_die_ref base_type_result;
12736 enum dwarf_type encoding;
12737 bool fpt_used = false;
12738 struct fixed_point_type_info fpt_info;
12739 tree type_bias = NULL_TREE;
12740
12741 /* If this is a subtype that should not be emitted as a subrange type,
12742 use the base type. See subrange_type_for_debug_p. */
12743 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12744 type = TREE_TYPE (type);
12745
12746 switch (TREE_CODE (type))
12747 {
12748 case INTEGER_TYPE:
12749 if ((dwarf_version >= 4 || !dwarf_strict)
12750 && TYPE_NAME (type)
12751 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12752 && DECL_IS_BUILTIN (TYPE_NAME (type))
12753 && DECL_NAME (TYPE_NAME (type)))
12754 {
12755 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12756 if (strcmp (name, "char16_t") == 0
12757 || strcmp (name, "char32_t") == 0)
12758 {
12759 encoding = DW_ATE_UTF;
12760 break;
12761 }
12762 }
12763 if ((dwarf_version >= 3 || !dwarf_strict)
12764 && lang_hooks.types.get_fixed_point_type_info)
12765 {
12766 memset (&fpt_info, 0, sizeof (fpt_info));
12767 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12768 {
12769 fpt_used = true;
12770 encoding = ((TYPE_UNSIGNED (type))
12771 ? DW_ATE_unsigned_fixed
12772 : DW_ATE_signed_fixed);
12773 break;
12774 }
12775 }
12776 if (TYPE_STRING_FLAG (type))
12777 {
12778 if (TYPE_UNSIGNED (type))
12779 encoding = DW_ATE_unsigned_char;
12780 else
12781 encoding = DW_ATE_signed_char;
12782 }
12783 else if (TYPE_UNSIGNED (type))
12784 encoding = DW_ATE_unsigned;
12785 else
12786 encoding = DW_ATE_signed;
12787
12788 if (!dwarf_strict
12789 && lang_hooks.types.get_type_bias)
12790 type_bias = lang_hooks.types.get_type_bias (type);
12791 break;
12792
12793 case REAL_TYPE:
12794 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12795 {
12796 if (dwarf_version >= 3 || !dwarf_strict)
12797 encoding = DW_ATE_decimal_float;
12798 else
12799 encoding = DW_ATE_lo_user;
12800 }
12801 else
12802 encoding = DW_ATE_float;
12803 break;
12804
12805 case FIXED_POINT_TYPE:
12806 if (!(dwarf_version >= 3 || !dwarf_strict))
12807 encoding = DW_ATE_lo_user;
12808 else if (TYPE_UNSIGNED (type))
12809 encoding = DW_ATE_unsigned_fixed;
12810 else
12811 encoding = DW_ATE_signed_fixed;
12812 break;
12813
12814 /* Dwarf2 doesn't know anything about complex ints, so use
12815 a user defined type for it. */
12816 case COMPLEX_TYPE:
12817 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12818 encoding = DW_ATE_complex_float;
12819 else
12820 encoding = DW_ATE_lo_user;
12821 break;
12822
12823 case BOOLEAN_TYPE:
12824 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12825 encoding = DW_ATE_boolean;
12826 break;
12827
12828 default:
12829 /* No other TREE_CODEs are Dwarf fundamental types. */
12830 gcc_unreachable ();
12831 }
12832
12833 base_type_result = new_die_raw (DW_TAG_base_type);
12834
12835 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12836 int_size_in_bytes (type));
12837 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12838
12839 if (need_endianity_attribute_p (reverse))
12840 add_AT_unsigned (base_type_result, DW_AT_endianity,
12841 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12842
12843 add_alignment_attribute (base_type_result, type);
12844
12845 if (fpt_used)
12846 {
12847 switch (fpt_info.scale_factor_kind)
12848 {
12849 case fixed_point_scale_factor_binary:
12850 add_AT_int (base_type_result, DW_AT_binary_scale,
12851 fpt_info.scale_factor.binary);
12852 break;
12853
12854 case fixed_point_scale_factor_decimal:
12855 add_AT_int (base_type_result, DW_AT_decimal_scale,
12856 fpt_info.scale_factor.decimal);
12857 break;
12858
12859 case fixed_point_scale_factor_arbitrary:
12860 /* Arbitrary scale factors cannot be described in standard DWARF,
12861 yet. */
12862 if (!dwarf_strict)
12863 {
12864 /* Describe the scale factor as a rational constant. */
12865 const dw_die_ref scale_factor
12866 = new_die (DW_TAG_constant, comp_unit_die (), type);
12867
12868 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12869 fpt_info.scale_factor.arbitrary.numerator);
12870 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12871 fpt_info.scale_factor.arbitrary.denominator);
12872
12873 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12874 }
12875 break;
12876
12877 default:
12878 gcc_unreachable ();
12879 }
12880 }
12881
12882 if (type_bias)
12883 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12884 dw_scalar_form_constant
12885 | dw_scalar_form_exprloc
12886 | dw_scalar_form_reference,
12887 NULL);
12888
12889 return base_type_result;
12890 }
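
/* Illustrative only: how a debugger could recover the numeric value of a
   fixed-point object from the scale attributes attached above.  KIND selects
   between DW_AT_binary_scale (value = mantissa * 2^SCALE), DW_AT_decimal_scale
   (value = mantissa * 10^SCALE) and the DW_AT_GNU_numerator/_denominator pair
   (value = mantissa * NUM / DEN).  The helper is a hypothetical sketch, not
   part of the compiler.  */

static double
sketch_fixed_point_value (long long mantissa, int kind,
                          long long num, long long den, int scale)
{
  double value = (double) mantissa;
  double factor = kind == 0 ? 2.0 : 10.0;
  int i, n = scale >= 0 ? scale : -scale;

  if (kind == 0 || kind == 1)		/* Binary or decimal scale.  */
    for (i = 0; i < n; i++)
      value = scale >= 0 ? value * factor : value / factor;
  else					/* Arbitrary (rational) scale.  */
    value = value * (double) num / (double) den;

  return value;
}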
12891
12892 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12893 named 'auto' in its type: return true for it, false otherwise. */
12894
12895 static inline bool
12896 is_cxx_auto (tree type)
12897 {
12898 if (is_cxx ())
12899 {
12900 tree name = TYPE_IDENTIFIER (type);
12901 if (name == get_identifier ("auto")
12902 || name == get_identifier ("decltype(auto)"))
12903 return true;
12904 }
12905 return false;
12906 }
12907
12908 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12909 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12910
12911 static inline int
12912 is_base_type (tree type)
12913 {
12914 switch (TREE_CODE (type))
12915 {
12916 case INTEGER_TYPE:
12917 case REAL_TYPE:
12918 case FIXED_POINT_TYPE:
12919 case COMPLEX_TYPE:
12920 case BOOLEAN_TYPE:
12921 case POINTER_BOUNDS_TYPE:
12922 return 1;
12923
12924 case VOID_TYPE:
12925 case ARRAY_TYPE:
12926 case RECORD_TYPE:
12927 case UNION_TYPE:
12928 case QUAL_UNION_TYPE:
12929 case ENUMERAL_TYPE:
12930 case FUNCTION_TYPE:
12931 case METHOD_TYPE:
12932 case POINTER_TYPE:
12933 case REFERENCE_TYPE:
12934 case NULLPTR_TYPE:
12935 case OFFSET_TYPE:
12936 case LANG_TYPE:
12937 case VECTOR_TYPE:
12938 return 0;
12939
12940 default:
12941 if (is_cxx_auto (type))
12942 return 0;
12943 gcc_unreachable ();
12944 }
12945
12946 return 0;
12947 }
12948
12949 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12950 node, return the size in bits for the type if it is a constant, or else
12951 return the alignment for the type if the type's size is not constant, or
12952 else return BITS_PER_WORD if the type actually turns out to be an
12953 ERROR_MARK node. */
12954
12955 static inline unsigned HOST_WIDE_INT
12956 simple_type_size_in_bits (const_tree type)
12957 {
12958 if (TREE_CODE (type) == ERROR_MARK)
12959 return BITS_PER_WORD;
12960 else if (TYPE_SIZE (type) == NULL_TREE)
12961 return 0;
12962 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12963 return tree_to_uhwi (TYPE_SIZE (type));
12964 else
12965 return TYPE_ALIGN (type);
12966 }
12967
12968 /* Similarly, but return an offset_int instead of UHWI. */
12969
12970 static inline offset_int
12971 offset_int_type_size_in_bits (const_tree type)
12972 {
12973 if (TREE_CODE (type) == ERROR_MARK)
12974 return BITS_PER_WORD;
12975 else if (TYPE_SIZE (type) == NULL_TREE)
12976 return 0;
12977 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12978 return wi::to_offset (TYPE_SIZE (type));
12979 else
12980 return TYPE_ALIGN (type);
12981 }
12982
12983 /* Given a pointer to a tree node for a subrange type, return a pointer
12984 to a DIE that describes the given type. */
12985
12986 static dw_die_ref
12987 subrange_type_die (tree type, tree low, tree high, tree bias,
12988 dw_die_ref context_die)
12989 {
12990 dw_die_ref subrange_die;
12991 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12992
12993 if (context_die == NULL)
12994 context_die = comp_unit_die ();
12995
12996 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12997
12998 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12999 {
13000 /* The size of the subrange type and its base type do not match,
13001 so we need to generate a size attribute for the subrange type. */
13002 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13003 }
13004
13005 add_alignment_attribute (subrange_die, type);
13006
13007 if (low)
13008 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13009 if (high)
13010 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13011 if (bias && !dwarf_strict)
13012 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13013 dw_scalar_form_constant
13014 | dw_scalar_form_exprloc
13015 | dw_scalar_form_reference,
13016 NULL);
13017
13018 return subrange_die;
13019 }
13020
13021 /* Returns the (const and/or volatile) cv_qualifiers associated with
13022 the decl node. This will normally be augmented with the
13023 cv_qualifiers of the underlying type in add_type_attribute. */
13024
13025 static int
13026 decl_quals (const_tree decl)
13027 {
13028 return ((TREE_READONLY (decl)
13029 /* The C++ front-end correctly marks reference-typed
13030 variables as readonly, but from a language (and debug
13031 info) standpoint they are not const-qualified. */
13032 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13033 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13034 | (TREE_THIS_VOLATILE (decl)
13035 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13036 }
13037
13038 /* Determine the TYPE whose qualifiers match the largest strict subset
13039 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13040 qualifiers outside QUAL_MASK. */
13041
13042 static int
13043 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13044 {
13045 tree t;
13046 int best_rank = 0, best_qual = 0, max_rank;
13047
13048 type_quals &= qual_mask;
13049 max_rank = popcount_hwi (type_quals) - 1;
13050
13051 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13052 t = TYPE_NEXT_VARIANT (t))
13053 {
13054 int q = TYPE_QUALS (t) & qual_mask;
13055
13056 if ((q & type_quals) == q && q != type_quals
13057 && check_base_type (t, type))
13058 {
13059 int rank = popcount_hwi (q);
13060
13061 if (rank > best_rank)
13062 {
13063 best_rank = rank;
13064 best_qual = q;
13065 }
13066 }
13067 }
13068
13069 return best_qual;
13070 }
13071
13072 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13073 static const dwarf_qual_info_t dwarf_qual_info[] =
13074 {
13075 { TYPE_QUAL_CONST, DW_TAG_const_type },
13076 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13077 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13078 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13079 };
13080 static const unsigned int dwarf_qual_info_size
13081 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13082
13083 /* If DIE is a qualified DIE of some base DIE with the same parent,
13084 return the base DIE, otherwise return NULL. Set MASK to the
13085 qualifiers added compared to the returned DIE. */
13086
13087 static dw_die_ref
13088 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13089 {
13090 unsigned int i;
13091 for (i = 0; i < dwarf_qual_info_size; i++)
13092 if (die->die_tag == dwarf_qual_info[i].t)
13093 break;
13094 if (i == dwarf_qual_info_size)
13095 return NULL;
13096 if (vec_safe_length (die->die_attr) != 1)
13097 return NULL;
13098 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13099 if (type == NULL || type->die_parent != die->die_parent)
13100 return NULL;
13101 *mask |= dwarf_qual_info[i].q;
13102 if (depth)
13103 {
13104 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13105 if (ret)
13106 return ret;
13107 }
13108 return type;
13109 }
13110
13111 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13112 entry that chains the modifiers specified by CV_QUALS in front of the
13113 given type. REVERSE is true if the type is to be interpreted in the
13114 reverse storage order wrt the target order. */
13115
13116 static dw_die_ref
13117 modified_type_die (tree type, int cv_quals, bool reverse,
13118 dw_die_ref context_die)
13119 {
13120 enum tree_code code = TREE_CODE (type);
13121 dw_die_ref mod_type_die;
13122 dw_die_ref sub_die = NULL;
13123 tree item_type = NULL;
13124 tree qualified_type;
13125 tree name, low, high;
13126 dw_die_ref mod_scope;
13127 /* Only these cv-qualifiers are currently handled. */
13128 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13129 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13130 ENCODE_QUAL_ADDR_SPACE(~0U));
13131 const bool reverse_base_type
13132 = need_endianity_attribute_p (reverse) && is_base_type (type);
13133
13134 if (code == ERROR_MARK)
13135 return NULL;
13136
13137 if (lang_hooks.types.get_debug_type)
13138 {
13139 tree debug_type = lang_hooks.types.get_debug_type (type);
13140
13141 if (debug_type != NULL_TREE && debug_type != type)
13142 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13143 }
13144
13145 cv_quals &= cv_qual_mask;
13146
13147 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13148 tag modifier (and not an attribute) old consumers won't be able
13149 to handle it. */
13150 if (dwarf_version < 3)
13151 cv_quals &= ~TYPE_QUAL_RESTRICT;
13152
13153 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13154 if (dwarf_version < 5)
13155 cv_quals &= ~TYPE_QUAL_ATOMIC;
13156
13157 /* See if we already have the appropriately qualified variant of
13158 this type. */
13159 qualified_type = get_qualified_type (type, cv_quals);
13160
13161 if (qualified_type == sizetype)
13162 {
13163 /* Try not to expose the internal sizetype type's name. */
13164 if (TYPE_NAME (qualified_type)
13165 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13166 {
13167 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13168
13169 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13170 && (TYPE_PRECISION (t)
13171 == TYPE_PRECISION (qualified_type))
13172 && (TYPE_UNSIGNED (t)
13173 == TYPE_UNSIGNED (qualified_type)));
13174 qualified_type = t;
13175 }
13176 else if (qualified_type == sizetype
13177 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13178 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13179 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13180 qualified_type = size_type_node;
13181 }
13182
13183 /* If we do, then we can just use its DIE, if it exists. */
13184 if (qualified_type)
13185 {
13186 mod_type_die = lookup_type_die (qualified_type);
13187
13188 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13189 dealt with specially: the DIE with the attribute, if it exists, is
13190 placed immediately after the regular DIE for the same base type. */
13191 if (mod_type_die
13192 && (!reverse_base_type
13193 || ((mod_type_die = mod_type_die->die_sib) != NULL
13194 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13195 return mod_type_die;
13196 }
13197
13198 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13199
13200 /* Handle C typedef types. */
13201 if (name
13202 && TREE_CODE (name) == TYPE_DECL
13203 && DECL_ORIGINAL_TYPE (name)
13204 && !DECL_ARTIFICIAL (name))
13205 {
13206 tree dtype = TREE_TYPE (name);
13207
13208 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13209 if (qualified_type == dtype && !reverse_base_type)
13210 {
13211 tree origin = decl_ultimate_origin (name);
13212
13213 /* Typedef variants that have an abstract origin don't get their own
13214 type DIE (see gen_typedef_die), so fall back on the ultimate
13215 abstract origin instead. */
13216 if (origin != NULL && origin != name)
13217 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13218 context_die);
13219
13220 /* For a named type, use the typedef. */
13221 gen_type_die (qualified_type, context_die);
13222 return lookup_type_die (qualified_type);
13223 }
13224 else
13225 {
13226 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13227 dquals &= cv_qual_mask;
13228 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13229 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13230 /* cv-unqualified version of named type. Just use
13231 the unnamed type to which it refers. */
13232 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13233 reverse, context_die);
13234 /* Else cv-qualified version of named type; fall through. */
13235 }
13236 }
13237
13238 mod_scope = scope_die_for (type, context_die);
13239
13240 if (cv_quals)
13241 {
13242 int sub_quals = 0, first_quals = 0;
13243 unsigned i;
13244 dw_die_ref first = NULL, last = NULL;
13245
13246 /* Determine a lesser qualified type that most closely matches
13247 this one. Then generate DW_TAG_* entries for the remaining
13248 qualifiers. */
13249 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13250 cv_qual_mask);
13251 if (sub_quals && use_debug_types)
13252 {
13253 bool needed = false;
13254 /* If emitting type units, make sure the order of qualifiers
13255 is canonical. Thus, start from unqualified type if
13256 an earlier qualifier is missing in sub_quals, but some later
13257 one is present there. */
13258 for (i = 0; i < dwarf_qual_info_size; i++)
13259 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13260 needed = true;
13261 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13262 {
13263 sub_quals = 0;
13264 break;
13265 }
13266 }
13267 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13268 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13269 {
13270 /* As not all intermediate qualified DIEs have corresponding
13271 tree types, ensure that qualified DIEs in the same scope
13272 as their DW_AT_type are emitted after their DW_AT_type,
13273 only with other qualified DIEs for the same type possibly
13274 in between them. Determine the range of such qualified
13275 DIEs now (first being the base type, last being corresponding
13276 last qualified DIE for it). */
13277 unsigned int count = 0;
13278 first = qualified_die_p (mod_type_die, &first_quals,
13279 dwarf_qual_info_size);
13280 if (first == NULL)
13281 first = mod_type_die;
13282 gcc_assert ((first_quals & ~sub_quals) == 0);
13283 for (count = 0, last = first;
13284 count < (1U << dwarf_qual_info_size);
13285 count++, last = last->die_sib)
13286 {
13287 int quals = 0;
13288 if (last == mod_scope->die_child)
13289 break;
13290 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13291 != first)
13292 break;
13293 }
13294 }
13295
13296 for (i = 0; i < dwarf_qual_info_size; i++)
13297 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13298 {
13299 dw_die_ref d;
13300 if (first && first != last)
13301 {
13302 for (d = first->die_sib; ; d = d->die_sib)
13303 {
13304 int quals = 0;
13305 qualified_die_p (d, &quals, dwarf_qual_info_size);
13306 if (quals == (first_quals | dwarf_qual_info[i].q))
13307 break;
13308 if (d == last)
13309 {
13310 d = NULL;
13311 break;
13312 }
13313 }
13314 if (d)
13315 {
13316 mod_type_die = d;
13317 continue;
13318 }
13319 }
13320 if (first)
13321 {
13322 d = new_die_raw (dwarf_qual_info[i].t);
13323 add_child_die_after (mod_scope, d, last);
13324 last = d;
13325 }
13326 else
13327 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13328 if (mod_type_die)
13329 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13330 mod_type_die = d;
13331 first_quals |= dwarf_qual_info[i].q;
13332 }
13333 }
13334 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13335 {
13336 dwarf_tag tag = DW_TAG_pointer_type;
13337 if (code == REFERENCE_TYPE)
13338 {
13339 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13340 tag = DW_TAG_rvalue_reference_type;
13341 else
13342 tag = DW_TAG_reference_type;
13343 }
13344 mod_type_die = new_die (tag, mod_scope, type);
13345
13346 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13347 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13348 add_alignment_attribute (mod_type_die, type);
13349 item_type = TREE_TYPE (type);
13350
13351 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13352 if (!ADDR_SPACE_GENERIC_P (as))
13353 {
13354 int action = targetm.addr_space.debug (as);
13355 if (action >= 0)
13356 {
13357 /* Positive values indicate an address_class. */
13358 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13359 }
13360 else
13361 {
13362 /* Negative values indicate an (inverted) segment base reg. */
13363 dw_loc_descr_ref d
13364 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13365 add_AT_loc (mod_type_die, DW_AT_segment, d);
13366 }
13367 }
13368 }
13369 else if (code == INTEGER_TYPE
13370 && TREE_TYPE (type) != NULL_TREE
13371 && subrange_type_for_debug_p (type, &low, &high))
13372 {
13373 tree bias = NULL_TREE;
13374 if (lang_hooks.types.get_type_bias)
13375 bias = lang_hooks.types.get_type_bias (type);
13376 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13377 item_type = TREE_TYPE (type);
13378 }
13379 else if (is_base_type (type))
13380 {
13381 mod_type_die = base_type_die (type, reverse);
13382
13383 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13384 if (reverse_base_type)
13385 {
13386 dw_die_ref after_die
13387 = modified_type_die (type, cv_quals, false, context_die);
13388 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13389 }
13390 else
13391 add_child_die (comp_unit_die (), mod_type_die);
13392
13393 add_pubtype (type, mod_type_die);
13394 }
13395 else
13396 {
13397 gen_type_die (type, context_die);
13398
13399 /* We have to get the type_main_variant here (and pass that to the
13400 `lookup_type_die' routine) because the ..._TYPE node we have
13401 might simply be a *copy* of some original type node (where the
13402 copy was created to help us keep track of typedef names) and
13403 that copy might have a different TYPE_UID from the original
13404 ..._TYPE node. */
13405 if (TREE_CODE (type) == FUNCTION_TYPE
13406 || TREE_CODE (type) == METHOD_TYPE)
13407 {
13408 /* For function/method types, can't just use type_main_variant here,
13409 because that can have different ref-qualifiers for C++,
13410 but try to canonicalize. */
13411 tree main = TYPE_MAIN_VARIANT (type);
13412 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13413 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13414 && check_base_type (t, main)
13415 && check_lang_type (t, type))
13416 return lookup_type_die (t);
13417 return lookup_type_die (type);
13418 }
13419 else if (TREE_CODE (type) != VECTOR_TYPE
13420 && TREE_CODE (type) != ARRAY_TYPE)
13421 return lookup_type_die (type_main_variant (type));
13422 else
13423 /* Vectors have the debugging information in the type,
13424 not the main variant. */
13425 return lookup_type_die (type);
13426 }
13427
13428 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13429 don't output a DW_TAG_typedef, since there isn't one in the
13430 user's program; just attach a DW_AT_name to the type.
13431 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13432 if the base type already has the same name. */
13433 if (name
13434 && ((TREE_CODE (name) != TYPE_DECL
13435 && (qualified_type == TYPE_MAIN_VARIANT (type)
13436 || (cv_quals == TYPE_UNQUALIFIED)))
13437 || (TREE_CODE (name) == TYPE_DECL
13438 && TREE_TYPE (name) == qualified_type
13439 && DECL_NAME (name))))
13440 {
13441 if (TREE_CODE (name) == TYPE_DECL)
13442 /* Could just call add_name_and_src_coords_attributes here,
13443 but since this is a builtin type it doesn't have any
13444 useful source coordinates anyway. */
13445 name = DECL_NAME (name);
13446 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13447 }
13448 /* This probably indicates a bug. */
13449 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13450 {
13451 name = TYPE_IDENTIFIER (type);
13452 add_name_attribute (mod_type_die,
13453 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13454 }
13455
13456 if (qualified_type && !reverse_base_type)
13457 equate_type_number_to_die (qualified_type, mod_type_die);
13458
13459 if (item_type)
13460 /* We must do this after the equate_type_number_to_die call, in case
13461 this is a recursive type. This ensures that the modified_type_die
13462 recursion will terminate even if the type is recursive. Recursive
13463 types are possible in Ada. */
13464 sub_die = modified_type_die (item_type,
13465 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13466 reverse,
13467 context_die);
13468
13469 if (sub_die != NULL)
13470 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13471
13472 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13473 if (TYPE_ARTIFICIAL (type))
13474 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13475
13476 return mod_type_die;
13477 }
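
/* For orientation, the DIE graph modified_type_die produces for a type like
   "const char *" (in the common case) is a short DW_AT_type chain:

     DW_TAG_pointer_type
       DW_AT_type -> DW_TAG_const_type
                       DW_AT_type -> DW_TAG_base_type ("char")

   with the item type emitted after the equate_type_number_to_die call so that
   recursive (e.g. Ada) types terminate.  This is only an illustration of the
   usual shape, not an exhaustive description.  */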
13478
13479 /* Generate DIEs for the generic parameters of T.
13480 T must be either a generic type or a generic function.
13481 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13482
13483 static void
13484 gen_generic_params_dies (tree t)
13485 {
13486 tree parms, args;
13487 int parms_num, i;
13488 dw_die_ref die = NULL;
13489 int non_default;
13490
13491 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13492 return;
13493
13494 if (TYPE_P (t))
13495 die = lookup_type_die (t);
13496 else if (DECL_P (t))
13497 die = lookup_decl_die (t);
13498
13499 gcc_assert (die);
13500
13501 parms = lang_hooks.get_innermost_generic_parms (t);
13502 if (!parms)
13503 /* T has no generic parameter. It means T is neither a generic type
13504 nor a generic function. End of story. */
13505 return;
13506
13507 parms_num = TREE_VEC_LENGTH (parms);
13508 args = lang_hooks.get_innermost_generic_args (t);
13509 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13510 non_default = int_cst_value (TREE_CHAIN (args));
13511 else
13512 non_default = TREE_VEC_LENGTH (args);
13513 for (i = 0; i < parms_num; i++)
13514 {
13515 tree parm, arg, arg_pack_elems;
13516 dw_die_ref parm_die;
13517
13518 parm = TREE_VEC_ELT (parms, i);
13519 arg = TREE_VEC_ELT (args, i);
13520 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13521 gcc_assert (parm && TREE_VALUE (parm) && arg);
13522
13523 if (parm && TREE_VALUE (parm) && arg)
13524 {
13525 /* If PARM represents a template parameter pack,
13526 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13527 by DW_TAG_template_*_parameter DIEs for the argument
13528 pack elements of ARG. Note that ARG would then be
13529 an argument pack. */
13530 if (arg_pack_elems)
13531 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13532 arg_pack_elems,
13533 die);
13534 else
13535 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13536 true /* emit name */, die);
13537 if (i >= non_default)
13538 add_AT_flag (parm_die, DW_AT_default_value, 1);
13539 }
13540 }
13541 }
13542
13543 /* Create and return a DIE for PARM which should be
13544 the representation of a generic type parameter.
13545 For instance, in the C++ front end, PARM would be a template parameter.
13546 ARG is the argument to PARM.
13547 EMIT_NAME_P if true, the DIE will have DW_AT_name attribute set to the
13548 name of the PARM.
13549 PARENT_DIE is the parent DIE which the new created DIE should be added to,
13550 as a child node. */
13551
13552 static dw_die_ref
13553 generic_parameter_die (tree parm, tree arg,
13554 bool emit_name_p,
13555 dw_die_ref parent_die)
13556 {
13557 dw_die_ref tmpl_die = NULL;
13558 const char *name = NULL;
13559
13560 if (!parm || !DECL_NAME (parm) || !arg)
13561 return NULL;
13562
13563 /* We support non-type generic parameters and arguments,
13564 type generic parameters and arguments, as well as
13565 generic generic parameters (a.k.a. template template parameters in C++)
13566 and arguments. */
13567 if (TREE_CODE (parm) == PARM_DECL)
13568 /* PARM is a nontype generic parameter */
13569 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13570 else if (TREE_CODE (parm) == TYPE_DECL)
13571 /* PARM is a type generic parameter. */
13572 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13573 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13574 /* PARM is a generic generic parameter.
13575 Its DIE is a GNU extension. It shall have a
13576 DW_AT_name attribute to represent the name of the template template
13577 parameter, and a DW_AT_GNU_template_name attribute to represent the
13578 name of the template template argument. */
13579 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13580 parent_die, parm);
13581 else
13582 gcc_unreachable ();
13583
13584 if (tmpl_die)
13585 {
13586 tree tmpl_type;
13587
13588 /* If PARM is a generic parameter pack, it means we are
13589 emitting debug info for a template argument pack element.
13590 In other terms, ARG is a template argument pack element.
13591 In that case, we don't emit any DW_AT_name attribute for
13592 the die. */
13593 if (emit_name_p)
13594 {
13595 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13596 gcc_assert (name);
13597 add_AT_string (tmpl_die, DW_AT_name, name);
13598 }
13599
13600 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13601 {
13602 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13603 TMPL_DIE should have a child DW_AT_type attribute that is set
13604 to the type of the argument to PARM, which is ARG.
13605 If PARM is a type generic parameter, TMPL_DIE should have a
13606 child DW_AT_type that is set to ARG. */
13607 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13608 add_type_attribute (tmpl_die, tmpl_type,
13609 (TREE_THIS_VOLATILE (tmpl_type)
13610 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13611 false, parent_die);
13612 }
13613 else
13614 {
13615 /* So TMPL_DIE is a DIE representing a generic generic template
13616 parameter, a.k.a. a template template parameter in C++, and ARG
13617 is a template. */
13618
13619 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13620 to the name of the argument. */
13621 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13622 if (name)
13623 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13624 }
13625
13626 if (TREE_CODE (parm) == PARM_DECL)
13627 /* So PARM is a non-type generic parameter.
13628 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13629 attribute of TMPL_DIE whose value represents the value
13630 of ARG.
13631 We must be careful here:
13632 the value of ARG might reference some function decls.
13633 We might currently be emitting debug info for a generic
13634 type; since types are emitted before function decls, we
13635 don't know whether the function decls referenced by ARG
13636 will actually be emitted after the cgraph computations.
13637 So we must defer the generation of the DW_AT_const_value
13638 until after cgraph is ready. */
13639 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13640 }
13641
13642 return tmpl_die;
13643 }
13644
13645 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13646 PARM_PACK, a template parameter pack, with arguments PARM_PACK_ARGS. The
13647 returned DIE will be a child DIE of PARENT_DIE. */
13648
13649 static dw_die_ref
13650 template_parameter_pack_die (tree parm_pack,
13651 tree parm_pack_args,
13652 dw_die_ref parent_die)
13653 {
13654 dw_die_ref die;
13655 int j;
13656
13657 gcc_assert (parent_die && parm_pack);
13658
13659 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13660 add_name_and_src_coords_attributes (die, parm_pack);
13661 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13662 generic_parameter_die (parm_pack,
13663 TREE_VEC_ELT (parm_pack_args, j),
13664 false /* Don't emit DW_AT_name */,
13665 die);
13666 return die;
13667 }
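
/* Example of the shape produced above (a GNU extension): for an instantiation
   such as S<int, char> of "template<typename... Ts> struct S", the
   instantiation's DIE gets one child

     DW_TAG_GNU_template_parameter_pack  (DW_AT_name "Ts")
       DW_TAG_template_type_param  (DW_AT_type -> int,  no DW_AT_name)
       DW_TAG_template_type_param  (DW_AT_type -> char, no DW_AT_name)

   i.e. one unnamed DW_TAG_template_*_param child per pack element.  This is
   an illustration, not compiler output reproduced verbatim.  */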
13668
13669 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13670 an enumerated type. */
13671
13672 static inline int
13673 type_is_enum (const_tree type)
13674 {
13675 return TREE_CODE (type) == ENUMERAL_TYPE;
13676 }
13677
13678 /* Return the DBX register number described by a given RTL node. */
13679
13680 static unsigned int
13681 dbx_reg_number (const_rtx rtl)
13682 {
13683 unsigned regno = REGNO (rtl);
13684
13685 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13686
13687 #ifdef LEAF_REG_REMAP
13688 if (crtl->uses_only_leaf_regs)
13689 {
13690 int leaf_reg = LEAF_REG_REMAP (regno);
13691 if (leaf_reg != -1)
13692 regno = (unsigned) leaf_reg;
13693 }
13694 #endif
13695
13696 regno = DBX_REGISTER_NUMBER (regno);
13697 gcc_assert (regno != INVALID_REGNUM);
13698 return regno;
13699 }
13700
13701 /* Optionally add a DW_OP_piece term to a location description expression.
13702 DW_OP_piece is only added if the location description expression already
13703 doesn't end with DW_OP_piece. */
13704
13705 static void
13706 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13707 {
13708 dw_loc_descr_ref loc;
13709
13710 if (*list_head != NULL)
13711 {
13712 /* Find the end of the chain. */
13713 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13714 ;
13715
13716 if (loc->dw_loc_opc != DW_OP_piece)
13717 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13718 }
13719 }
13720
13721 /* Return a location descriptor that designates a machine register or
13722 zero if there is none. */
13723
13724 static dw_loc_descr_ref
13725 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13726 {
13727 rtx regs;
13728
13729 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13730 return 0;
13731
13732 /* We only use "frame base" when we're sure we're talking about the
13733 post-prologue local stack frame. We do this by *not* running
13734 register elimination until this point, and recognizing the special
13735 argument pointer and soft frame pointer rtx's.
13736 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13737 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13738 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13739 {
13740 dw_loc_descr_ref result = NULL;
13741
13742 if (dwarf_version >= 4 || !dwarf_strict)
13743 {
13744 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13745 initialized);
13746 if (result)
13747 add_loc_descr (&result,
13748 new_loc_descr (DW_OP_stack_value, 0, 0));
13749 }
13750 return result;
13751 }
13752
13753 regs = targetm.dwarf_register_span (rtl);
13754
13755 if (REG_NREGS (rtl) > 1 || regs)
13756 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13757 else
13758 {
13759 unsigned int dbx_regnum = dbx_reg_number (rtl);
13760 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13761 return 0;
13762 return one_reg_loc_descriptor (dbx_regnum, initialized);
13763 }
13764 }
13765
13766 /* Return a location descriptor that designates a machine register for
13767 a given hard register number. */
13768
13769 static dw_loc_descr_ref
13770 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13771 {
13772 dw_loc_descr_ref reg_loc_descr;
13773
13774 if (regno <= 31)
13775 reg_loc_descr
13776 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13777 else
13778 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13779
13780 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13781 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13782
13783 return reg_loc_descr;
13784 }
13785
13786 /* Given an RTL of a register, return a location descriptor that
13787 designates a value that spans more than one register. */
13788
13789 static dw_loc_descr_ref
13790 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13791 enum var_init_status initialized)
13792 {
13793 int size, i;
13794 dw_loc_descr_ref loc_result = NULL;
13795
13796 /* Simple, contiguous registers. */
13797 if (regs == NULL_RTX)
13798 {
13799 unsigned reg = REGNO (rtl);
13800 int nregs;
13801
13802 #ifdef LEAF_REG_REMAP
13803 if (crtl->uses_only_leaf_regs)
13804 {
13805 int leaf_reg = LEAF_REG_REMAP (reg);
13806 if (leaf_reg != -1)
13807 reg = (unsigned) leaf_reg;
13808 }
13809 #endif
13810
13811 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13812 nregs = REG_NREGS (rtl);
13813
13814 /* At present we only track constant-sized pieces. */
13815 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13816 return NULL;
13817 size /= nregs;
13818
13819 loc_result = NULL;
13820 while (nregs--)
13821 {
13822 dw_loc_descr_ref t;
13823
13824 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13825 VAR_INIT_STATUS_INITIALIZED);
13826 add_loc_descr (&loc_result, t);
13827 add_loc_descr_op_piece (&loc_result, size);
13828 ++reg;
13829 }
13830 return loc_result;
13831 }
13832
13833 /* Now onto stupid register sets in non-contiguous locations. */
13834
13835 gcc_assert (GET_CODE (regs) == PARALLEL);
13836
13837 /* At present we only track constant-sized pieces. */
13838 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13839 return NULL;
13840 loc_result = NULL;
13841
13842 for (i = 0; i < XVECLEN (regs, 0); ++i)
13843 {
13844 dw_loc_descr_ref t;
13845
13846 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13847 VAR_INIT_STATUS_INITIALIZED);
13848 add_loc_descr (&loc_result, t);
13849 add_loc_descr_op_piece (&loc_result, size);
13850 }
13851
13852 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13853 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13854 return loc_result;
13855 }
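
/* Example of the composition built above: a 16-byte value living in two
   contiguous 8-byte registers r0 and r1 would be described as

     DW_OP_reg0 ; DW_OP_piece 8 ; DW_OP_reg1 ; DW_OP_piece 8

   each DW_OP_piece giving the size in bytes of the part provided by the
   preceding register.  The register names here are only illustrative.  */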
13856
13857 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13858
13859 /* Return a location descriptor that designates a constant i,
13860 as a compound operation from constant (i >> shift), constant shift
13861 and DW_OP_shl. */
13862
13863 static dw_loc_descr_ref
13864 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13865 {
13866 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13867 add_loc_descr (&ret, int_loc_descriptor (shift));
13868 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13869 return ret;
13870 }
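
/* Worked example (64-bit HOST_WIDE_INT assumed): for i = (HOST_WIDE_INT) 1
   << 60 we have clz = 3 and ctz = 60, so int_loc_descriptor below emits

     DW_OP_lit16 ; DW_OP_const1u 56 ; DW_OP_shl

   i.e. (16 << 56), for a total of 4 bytes, whereas DW_OP_constu would need
   1 + 9 ULEB128 bytes and DW_OP_const8u 9 bytes.  */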
13871
13872 /* Return a location descriptor that designates constant POLY_I. */
13873
13874 static dw_loc_descr_ref
13875 int_loc_descriptor (poly_int64 poly_i)
13876 {
13877 enum dwarf_location_atom op;
13878
13879 HOST_WIDE_INT i;
13880 if (!poly_i.is_constant (&i))
13881 {
13882 /* Create location descriptions for the non-constant part and
13883 add any constant offset at the end. */
13884 dw_loc_descr_ref ret = NULL;
13885 HOST_WIDE_INT constant = poly_i.coeffs[0];
13886 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13887 {
13888 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13889 if (coeff != 0)
13890 {
13891 dw_loc_descr_ref start = ret;
13892 unsigned int factor;
13893 int bias;
13894 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13895 (j, &factor, &bias);
13896
13897 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13898 add COEFF * (REGNO / FACTOR) now and subtract
13899 COEFF * BIAS from the final constant part. */
13900 constant -= coeff * bias;
13901 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13902 if (coeff % factor == 0)
13903 coeff /= factor;
13904 else
13905 {
13906 int amount = exact_log2 (factor);
13907 gcc_assert (amount >= 0);
13908 add_loc_descr (&ret, int_loc_descriptor (amount));
13909 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13910 }
13911 if (coeff != 1)
13912 {
13913 add_loc_descr (&ret, int_loc_descriptor (coeff));
13914 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13915 }
13916 if (start)
13917 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13918 }
13919 }
13920 loc_descr_plus_const (&ret, constant);
13921 return ret;
13922 }
13923
13924 /* Pick the smallest representation of a constant, rather than just
13925 defaulting to the LEB encoding. */
13926 if (i >= 0)
13927 {
13928 int clz = clz_hwi (i);
13929 int ctz = ctz_hwi (i);
13930 if (i <= 31)
13931 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13932 else if (i <= 0xff)
13933 op = DW_OP_const1u;
13934 else if (i <= 0xffff)
13935 op = DW_OP_const2u;
13936 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13937 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13938 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13939 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13940 while DW_OP_const4u is 5 bytes. */
13941 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13942 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13943 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13944 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13945 while DW_OP_const4u is 5 bytes. */
13946 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13947
13948 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13949 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13950 <= 4)
13951 {
13952 /* As i >= 2**31, the double cast above will yield a negative number.
13953 Since wrapping is defined in DWARF expressions we can output big
13954 positive integers as small negative ones, regardless of the size
13955 of host wide ints.
13956
13957 Here, since the evaluator will handle 32-bit values and since i >=
13958 2**31, we know it's going to be interpreted as a negative literal:
13959 store it this way if we can do better than 5 bytes this way. */
13960 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13961 }
13962 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13963 op = DW_OP_const4u;
13964
13965 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13966 least 6 bytes: see if we can do better before falling back to it. */
13967 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13968 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13969 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13970 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13971 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13972 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13973 >= HOST_BITS_PER_WIDE_INT)
13974 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13975 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13976 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13977 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13978 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13979 && size_of_uleb128 (i) > 6)
13980 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13981 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13982 else
13983 op = DW_OP_constu;
13984 }
13985 else
13986 {
13987 if (i >= -0x80)
13988 op = DW_OP_const1s;
13989 else if (i >= -0x8000)
13990 op = DW_OP_const2s;
13991 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13992 {
13993 if (size_of_int_loc_descriptor (i) < 5)
13994 {
13995 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13996 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13997 return ret;
13998 }
13999 op = DW_OP_const4s;
14000 }
14001 else
14002 {
14003 if (size_of_int_loc_descriptor (i)
14004 < (unsigned long) 1 + size_of_sleb128 (i))
14005 {
14006 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14007 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14008 return ret;
14009 }
14010 op = DW_OP_consts;
14011 }
14012 }
14013
14014 return new_loc_descr (op, i, 0);
14015 }
14016
14017 /* Likewise, for unsigned constants. */
14018
14019 static dw_loc_descr_ref
14020 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14021 {
14022 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14023 const unsigned HOST_WIDE_INT max_uint
14024 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14025
14026 /* If possible, use the clever signed constants handling. */
14027 if (i <= max_int)
14028 return int_loc_descriptor ((HOST_WIDE_INT) i);
14029
14030 /* Here, we are left with positive numbers that cannot be represented as
14031 HOST_WIDE_INT, i.e.:
14032 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14033
14034 Using a DW_OP_const4u/const8u/constu operation to encode them consumes a
14035 lot of bytes, whereas it may be better to output a negative integer: thanks
14036 to integer wrapping, we know that:
14037 x = x - 2 ** (DWARF2_ADDR_SIZE * 8)
14038 = x - 2 * (max (HOST_WIDE_INT) + 1)
14039 So numbers close to max (unsigned HOST_WIDE_INT) can be represented as
14040 small negative integers. Let's try that in cases where it will clearly
14041 improve the encoding: there is no gain in turning DW_OP_const4u into
14042 DW_OP_const4s. */
14043 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14044 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14045 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14046 {
14047 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14048
14049 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14050 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14051 const HOST_WIDE_INT second_shift
14052 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14053
14054 /* So we finally have:
14055 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14056 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14057 return int_loc_descriptor (second_shift);
14058 }
14059
14060 /* Last chance: fallback to a simple constant operation. */
14061 return new_loc_descr
14062 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14063 ? DW_OP_const4u
14064 : DW_OP_const8u,
14065 i, 0);
14066 }
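
/* Worked example (DWARF2_ADDR_SIZE == 8 and a 64-bit HOST_WIDE_INT assumed):
   for i = 0xffffffffffffff00 the code above computes second_shift = -256 and
   emits DW_OP_const2s -256 (3 bytes).  On the 64-bit DWARF expression stack
   -256 wraps back to 0xffffffffffffff00, so this is equivalent to, and much
   shorter than, DW_OP_const8u (9 bytes).  */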
14067
14068 /* Generate and return a location description that computes the unsigned
14069 comparison of the two stack top entries (a OP b where b is the top-most
14070 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14071 LE_EXPR, GT_EXPR or GE_EXPR. */
14072
14073 static dw_loc_descr_ref
14074 uint_comparison_loc_list (enum tree_code kind)
14075 {
14076 enum dwarf_location_atom op, flip_op;
14077 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14078
14079 switch (kind)
14080 {
14081 case LT_EXPR:
14082 op = DW_OP_lt;
14083 break;
14084 case LE_EXPR:
14085 op = DW_OP_le;
14086 break;
14087 case GT_EXPR:
14088 op = DW_OP_gt;
14089 break;
14090 case GE_EXPR:
14091 op = DW_OP_ge;
14092 break;
14093 default:
14094 gcc_unreachable ();
14095 }
14096
14097 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14098 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14099
14100 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14101 possible to perform unsigned comparisons: we just have to distinguish
14102 three cases:
14103
14104 1. when a and b have the same sign (as signed integers); then we should
14105 return: a OP(signed) b;
14106
14107 2. when a is a negative signed integer while b is a positive one, then a
14108 is a greater unsigned integer than b; likewise when a and b's roles
14109 are flipped.
14110
14111 So first, compare the sign of the two operands. */
14112 ret = new_loc_descr (DW_OP_over, 0, 0);
14113 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14114 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14115 /* If they have different signs (i.e. they have different sign bits), then
14116 the stack top value has now the sign bit set and thus it's smaller than
14117 zero. */
14118 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14119 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14120 add_loc_descr (&ret, bra_node);
14121
14122 /* We are in case 1. At this point, we know both operands have the same
14123 sign, so it's safe to use the built-in signed comparison. */
14124 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14125 add_loc_descr (&ret, jmp_node);
14126
14127 /* We are in case 2. Here, we know both operands do not have the same sign,
14128 so we have to flip the signed comparison. */
14129 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14130 tmp = new_loc_descr (flip_op, 0, 0);
14131 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14132 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14133 add_loc_descr (&ret, tmp);
14134
14135 /* This dummy operation is necessary to make the two branches join. */
14136 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14137 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14138 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14139 add_loc_descr (&ret, tmp);
14140
14141 return ret;
14142 }
14143
14144 /* Likewise, but takes the location description lists (might be destructive on
14145 them). Return NULL if either is NULL or if concatenation fails. */
14146
14147 static dw_loc_list_ref
14148 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14149 enum tree_code kind)
14150 {
14151 if (left == NULL || right == NULL)
14152 return NULL;
14153
14154 add_loc_list (&left, right);
14155 if (left == NULL)
14156 return NULL;
14157
14158 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14159 return left;
14160 }
14161
14162 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14163 without actually allocating it. */
14164
14165 static unsigned long
14166 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14167 {
14168 return size_of_int_loc_descriptor (i >> shift)
14169 + size_of_int_loc_descriptor (shift)
14170 + 1;
14171 }
14172
14173 /* Return size_of_locs (int_loc_descriptor (i)) without
14174 actually allocating it. */
14175
14176 static unsigned long
14177 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14178 {
14179 unsigned long s;
14180
14181 if (i >= 0)
14182 {
14183 int clz, ctz;
14184 if (i <= 31)
14185 return 1;
14186 else if (i <= 0xff)
14187 return 2;
14188 else if (i <= 0xffff)
14189 return 3;
14190 clz = clz_hwi (i);
14191 ctz = ctz_hwi (i);
14192 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14193 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14194 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14195 - clz - 5);
14196 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14197 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14198 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14199 - clz - 8);
14200 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14201 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14202 <= 4)
14203 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14204 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14205 return 5;
14206 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14207 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14208 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14209 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14210 - clz - 8);
14211 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14212 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14213 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14214 - clz - 16);
14215 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14216 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14217 && s > 6)
14218 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14219 - clz - 32);
14220 else
14221 return 1 + s;
14222 }
14223 else
14224 {
14225 if (i >= -0x80)
14226 return 2;
14227 else if (i >= -0x8000)
14228 return 3;
14229 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14230 {
14231 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14232 {
14233 s = size_of_int_loc_descriptor (-i) + 1;
14234 if (s < 5)
14235 return s;
14236 }
14237 return 5;
14238 }
14239 else
14240 {
14241 unsigned long r = 1 + size_of_sleb128 (i);
14242 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14243 {
14244 s = size_of_int_loc_descriptor (-i) + 1;
14245 if (s < r)
14246 return s;
14247 }
14248 return r;
14249 }
14250 }
14251 }
14252
14253 /* Return a loc description representing the "address" of an integer value.
14254 This can appear only as a toplevel expression. */
14255
14256 static dw_loc_descr_ref
14257 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14258 {
14259 int litsize;
14260 dw_loc_descr_ref loc_result = NULL;
14261
14262 if (!(dwarf_version >= 4 || !dwarf_strict))
14263 return NULL;
14264
14265 litsize = size_of_int_loc_descriptor (i);
14266 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14267 is more compact. For DW_OP_stack_value we need:
14268 litsize + 1 (DW_OP_stack_value)
14269 and for DW_OP_implicit_value:
14270 1 (DW_OP_implicit_value) + 1 (length) + size. */
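/* For instance (illustrative, assuming DWARF2_ADDR_SIZE >= 4): for i == 5
   and size == 4, the stack form DW_OP_lit5 DW_OP_stack_value takes 2 bytes
   and wins over DW_OP_implicit_value 4 <4 data bytes>, which takes 6.  */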
14271 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14272 {
14273 loc_result = int_loc_descriptor (i);
14274 add_loc_descr (&loc_result,
14275 new_loc_descr (DW_OP_stack_value, 0, 0));
14276 return loc_result;
14277 }
14278
14279 loc_result = new_loc_descr (DW_OP_implicit_value,
14280 size, 0);
14281 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14282 loc_result->dw_loc_oprnd2.v.val_int = i;
14283 return loc_result;
14284 }
14285
14286 /* Return a location descriptor that designates a base+offset location. */
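/* E.g. (illustrative): a variable at frame pointer - 16 typically yields
   DW_OP_fbreg <-16 adjusted by frame_pointer_fb_offset>, while a plain hard
   register base produces a DW_OP_breg<n>/DW_OP_bregx descriptor via
   new_reg_loc_descr.  */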
14287
14288 static dw_loc_descr_ref
14289 based_loc_descr (rtx reg, poly_int64 offset,
14290 enum var_init_status initialized)
14291 {
14292 unsigned int regno;
14293 dw_loc_descr_ref result;
14294 dw_fde_ref fde = cfun->fde;
14295
14296 /* We only use "frame base" when we're sure we're talking about the
14297 post-prologue local stack frame. We do this by *not* running
14298 register elimination until this point, and recognizing the special
14299 argument pointer and soft frame pointer rtx's. */
14300 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14301 {
14302 rtx elim = (ira_use_lra_p
14303 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14304 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14305
14306 if (elim != reg)
14307 {
14308 elim = strip_offset_and_add (elim, &offset);
14309 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
14310 && (elim == hard_frame_pointer_rtx
14311 || elim == stack_pointer_rtx))
14312 || elim == (frame_pointer_needed
14313 ? hard_frame_pointer_rtx
14314 : stack_pointer_rtx));
14315
14316 /* If drap register is used to align stack, use frame
14317 pointer + offset to access stack variables. If stack
14318 is aligned without drap, use stack pointer + offset to
14319 access stack variables. */
14320 if (crtl->stack_realign_tried
14321 && reg == frame_pointer_rtx)
14322 {
14323 int base_reg
14324 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14325 ? HARD_FRAME_POINTER_REGNUM
14326 : REGNO (elim));
14327 return new_reg_loc_descr (base_reg, offset);
14328 }
14329
14330 gcc_assert (frame_pointer_fb_offset_valid);
14331 offset += frame_pointer_fb_offset;
14332 HOST_WIDE_INT const_offset;
14333 if (offset.is_constant (&const_offset))
14334 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14335 else
14336 {
14337 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14338 loc_descr_plus_const (&ret, offset);
14339 return ret;
14340 }
14341 }
14342 }
14343
14344 regno = REGNO (reg);
14345 #ifdef LEAF_REG_REMAP
14346 if (crtl->uses_only_leaf_regs)
14347 {
14348 int leaf_reg = LEAF_REG_REMAP (regno);
14349 if (leaf_reg != -1)
14350 regno = (unsigned) leaf_reg;
14351 }
14352 #endif
14353 regno = DWARF_FRAME_REGNUM (regno);
14354
14355 HOST_WIDE_INT const_offset;
14356 if (!optimize && fde
14357 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14358 && offset.is_constant (&const_offset))
14359 {
14360 /* Use cfa+offset to represent the location of arguments passed
14361 on the stack when drap is used to align stack.
14362 Only do this when not optimizing; for optimized code, var-tracking
14363 is supposed to track where the arguments live, and the register
14364 used as vdrap or drap in some spot might be used for something
14365 else in other parts of the routine. */
14366 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14367 }
14368
14369 result = new_reg_loc_descr (regno, offset);
14370
14371 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14372 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14373
14374 return result;
14375 }
14376
14377 /* Return true if this RTL expression describes a base+offset calculation. */
14378
14379 static inline int
14380 is_based_loc (const_rtx rtl)
14381 {
14382 return (GET_CODE (rtl) == PLUS
14383 && ((REG_P (XEXP (rtl, 0))
14384 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14385 && CONST_INT_P (XEXP (rtl, 1)))));
14386 }
14387
14388 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14389 failed. */
14390
14391 static dw_loc_descr_ref
14392 tls_mem_loc_descriptor (rtx mem)
14393 {
14394 tree base;
14395 dw_loc_descr_ref loc_result;
14396
14397 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14398 return NULL;
14399
14400 base = get_base_address (MEM_EXPR (mem));
14401 if (base == NULL
14402 || !VAR_P (base)
14403 || !DECL_THREAD_LOCAL_P (base))
14404 return NULL;
14405
14406 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14407 if (loc_result == NULL)
14408 return NULL;
14409
14410 if (maybe_ne (MEM_OFFSET (mem), 0))
14411 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14412
14413 return loc_result;
14414 }
14415
14416 /* Output debug info about the reason why we failed to expand an expression
14417 as a DWARF expression. */
14418
14419 static void
14420 expansion_failed (tree expr, rtx rtl, char const *reason)
14421 {
14422 if (dump_file && (dump_flags & TDF_DETAILS))
14423 {
14424 fprintf (dump_file, "Failed to expand as dwarf: ");
14425 if (expr)
14426 print_generic_expr (dump_file, expr, dump_flags);
14427 if (rtl)
14428 {
14429 fprintf (dump_file, "\n");
14430 print_rtl (dump_file, rtl);
14431 }
14432 fprintf (dump_file, "\nReason: %s\n", reason);
14433 }
14434 }
14435
14436 /* Helper function for const_ok_for_output. */
14437
14438 static bool
14439 const_ok_for_output_1 (rtx rtl)
14440 {
14441 if (targetm.const_not_ok_for_debug_p (rtl))
14442 {
14443 if (GET_CODE (rtl) != UNSPEC)
14444 {
14445 expansion_failed (NULL_TREE, rtl,
14446 "Expression rejected for debug by the backend.\n");
14447 return false;
14448 }
14449
14450 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14451 the target hook doesn't explicitly allow it in debug info, assume
14452 we can't express it in the debug info. */
14453 /* Don't complain about TLS UNSPECs, those are just too hard to
14454 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14455 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14456 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14457 if (flag_checking
14458 && (XVECLEN (rtl, 0) == 0
14459 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14460 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14461 inform (current_function_decl
14462 ? DECL_SOURCE_LOCATION (current_function_decl)
14463 : UNKNOWN_LOCATION,
14464 #if NUM_UNSPEC_VALUES > 0
14465 "non-delegitimized UNSPEC %s (%d) found in variable location",
14466 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14467 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14468 XINT (rtl, 1));
14469 #else
14470 "non-delegitimized UNSPEC %d found in variable location",
14471 XINT (rtl, 1));
14472 #endif
14473 expansion_failed (NULL_TREE, rtl,
14474 "UNSPEC hasn't been delegitimized.\n");
14475 return false;
14476 }
14477
14478 if (CONST_POLY_INT_P (rtl))
14479 return false;
14480
14481 if (targetm.const_not_ok_for_debug_p (rtl))
14482 {
14483 expansion_failed (NULL_TREE, rtl,
14484 "Expression rejected for debug by the backend.\n");
14485 return false;
14486 }
14487
14488 /* FIXME: Refer to PR60655. It is possible for simplification
14489 of rtl expressions in var tracking to produce such expressions.
14490 We should really identify / validate expressions
14491 enclosed in CONST that can be handled by assemblers on various
14492 targets and only handle legitimate cases here. */
14493 switch (GET_CODE (rtl))
14494 {
14495 case SYMBOL_REF:
14496 break;
14497 case NOT:
14498 case NEG:
14499 return false;
14500 default:
14501 return true;
14502 }
14503
14504 if (CONSTANT_POOL_ADDRESS_P (rtl))
14505 {
14506 bool marked;
14507 get_pool_constant_mark (rtl, &marked);
14508 /* If all references to this pool constant were optimized away,
14509 it was not output and thus we can't represent it. */
14510 if (!marked)
14511 {
14512 expansion_failed (NULL_TREE, rtl,
14513 "Constant was removed from constant pool.\n");
14514 return false;
14515 }
14516 }
14517
14518 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14519 return false;
14520
14521 /* Avoid references to external symbols in debug info, on several targets
14522 the linker might even refuse to link when linking a shared library,
14523 and in many other cases the relocations for .debug_info/.debug_loc are
14524 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
14525 to be defined within the same shared library or executable are fine. */
14526 if (SYMBOL_REF_EXTERNAL_P (rtl))
14527 {
14528 tree decl = SYMBOL_REF_DECL (rtl);
14529
14530 if (decl == NULL || !targetm.binds_local_p (decl))
14531 {
14532 expansion_failed (NULL_TREE, rtl,
14533 "Symbol not defined in current TU.\n");
14534 return false;
14535 }
14536 }
14537
14538 return true;
14539 }
14540
14541 /* Return true if constant RTL can be emitted in DW_OP_addr or
14542 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14543 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14544
14545 static bool
14546 const_ok_for_output (rtx rtl)
14547 {
14548 if (GET_CODE (rtl) == SYMBOL_REF)
14549 return const_ok_for_output_1 (rtl);
14550
14551 if (GET_CODE (rtl) == CONST)
14552 {
14553 subrtx_var_iterator::array_type array;
14554 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14555 if (!const_ok_for_output_1 (*iter))
14556 return false;
14557 return true;
14558 }
14559
14560 return true;
14561 }
14562
14563 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14564 if possible, NULL otherwise. */
14565
14566 static dw_die_ref
14567 base_type_for_mode (machine_mode mode, bool unsignedp)
14568 {
14569 dw_die_ref type_die;
14570 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14571
14572 if (type == NULL)
14573 return NULL;
14574 switch (TREE_CODE (type))
14575 {
14576 case INTEGER_TYPE:
14577 case REAL_TYPE:
14578 break;
14579 default:
14580 return NULL;
14581 }
14582 type_die = lookup_type_die (type);
14583 if (!type_die)
14584 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14585 comp_unit_die ());
14586 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14587 return NULL;
14588 return type_die;
14589 }
14590
14591 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14592 type matching MODE, or, if MODE is narrower than or as wide as
14593 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14594 possible. */
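/* E.g. (illustrative): when MODE is not wider than DWARF2_ADDR_SIZE the
   operand-less DW_OP_convert added below converts back to the generic
   (untyped) value; for a wider MODE the conversion targets the unsigned
   base type DIE for that mode.  */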
14595
14596 static dw_loc_descr_ref
14597 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14598 {
14599 machine_mode outer_mode = mode;
14600 dw_die_ref type_die;
14601 dw_loc_descr_ref cvt;
14602
14603 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14604 {
14605 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14606 return op;
14607 }
14608 type_die = base_type_for_mode (outer_mode, 1);
14609 if (type_die == NULL)
14610 return NULL;
14611 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14612 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14613 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14614 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14615 add_loc_descr (&op, cvt);
14616 return op;
14617 }
14618
14619 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14620
14621 static dw_loc_descr_ref
14622 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14623 dw_loc_descr_ref op1)
14624 {
14625 dw_loc_descr_ref ret = op0;
14626 add_loc_descr (&ret, op1);
14627 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14628 if (STORE_FLAG_VALUE != 1)
14629 {
14630 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14631 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14632 }
14633 return ret;
14634 }
14635
14636 /* Subroutine of scompare_loc_descriptor for the case in which we're
14637 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14638 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14639
14640 static dw_loc_descr_ref
14641 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14642 scalar_int_mode op_mode,
14643 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14644 {
14645 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14646 dw_loc_descr_ref cvt;
14647
14648 if (type_die == NULL)
14649 return NULL;
14650 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14651 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14652 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14653 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14654 add_loc_descr (&op0, cvt);
14655 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14656 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14657 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14658 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14659 add_loc_descr (&op1, cvt);
14660 return compare_loc_descriptor (op, op0, op1);
14661 }
14662
14663 /* Subroutine of scompare_loc_descriptor for the case in which we're
14664 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14665 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
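/* The idea (illustrative): with e.g. QImode operands on a 32-bit target,
   both operands are shifted left by 24 bits so that the mode's sign bit
   becomes the sign bit of the full-width value, after which the ordinary
   signed comparison gives the right answer for the ordered operators.  */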
14666
14667 static dw_loc_descr_ref
14668 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14669 scalar_int_mode op_mode,
14670 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14671 {
14672 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14673 /* For eq/ne, if the operands are known to be zero-extended,
14674 there is no need to do the fancy shifting up. */
14675 if (op == DW_OP_eq || op == DW_OP_ne)
14676 {
14677 dw_loc_descr_ref last0, last1;
14678 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14679 ;
14680 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14681 ;
14682 /* deref_size zero extends, and for constants we can check
14683 whether they are zero extended or not. */
14684 if (((last0->dw_loc_opc == DW_OP_deref_size
14685 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14686 || (CONST_INT_P (XEXP (rtl, 0))
14687 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14688 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14689 && ((last1->dw_loc_opc == DW_OP_deref_size
14690 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14691 || (CONST_INT_P (XEXP (rtl, 1))
14692 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14693 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14694 return compare_loc_descriptor (op, op0, op1);
14695
14696 /* EQ/NE comparison against constant in narrower type than
14697 DWARF2_ADDR_SIZE can be performed either as
14698 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14699 DW_OP_{eq,ne}
14700 or
14701 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14702 DW_OP_{eq,ne}. Pick whatever is shorter. */
14703 if (CONST_INT_P (XEXP (rtl, 1))
14704 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14705 && (size_of_int_loc_descriptor (shift) + 1
14706 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14707 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14708 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14709 & GET_MODE_MASK (op_mode))))
14710 {
14711 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14712 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14713 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14714 & GET_MODE_MASK (op_mode));
14715 return compare_loc_descriptor (op, op0, op1);
14716 }
14717 }
14718 add_loc_descr (&op0, int_loc_descriptor (shift));
14719 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14720 if (CONST_INT_P (XEXP (rtl, 1)))
14721 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14722 else
14723 {
14724 add_loc_descr (&op1, int_loc_descriptor (shift));
14725 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14726 }
14727 return compare_loc_descriptor (op, op0, op1);
14728 }
14729
14730 /* Return location descriptor for signed comparison OP RTL. */
14731
14732 static dw_loc_descr_ref
14733 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14734 machine_mode mem_mode)
14735 {
14736 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14737 dw_loc_descr_ref op0, op1;
14738
14739 if (op_mode == VOIDmode)
14740 op_mode = GET_MODE (XEXP (rtl, 1));
14741 if (op_mode == VOIDmode)
14742 return NULL;
14743
14744 scalar_int_mode int_op_mode;
14745 if (dwarf_strict
14746 && dwarf_version < 5
14747 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14748 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14749 return NULL;
14750
14751 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14752 VAR_INIT_STATUS_INITIALIZED);
14753 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14754 VAR_INIT_STATUS_INITIALIZED);
14755
14756 if (op0 == NULL || op1 == NULL)
14757 return NULL;
14758
14759 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14760 {
14761 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14762 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14763
14764 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14765 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14766 }
14767 return compare_loc_descriptor (op, op0, op1);
14768 }
14769
14770 /* Return location descriptor for unsigned comparison OP RTL. */
14771
14772 static dw_loc_descr_ref
14773 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14774 machine_mode mem_mode)
14775 {
14776 dw_loc_descr_ref op0, op1;
14777
14778 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14779 if (test_op_mode == VOIDmode)
14780 test_op_mode = GET_MODE (XEXP (rtl, 1));
14781
14782 scalar_int_mode op_mode;
14783 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14784 return NULL;
14785
14786 if (dwarf_strict
14787 && dwarf_version < 5
14788 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14789 return NULL;
14790
14791 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14792 VAR_INIT_STATUS_INITIALIZED);
14793 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14794 VAR_INIT_STATUS_INITIALIZED);
14795
14796 if (op0 == NULL || op1 == NULL)
14797 return NULL;
14798
14799 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14800 {
14801 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14802 dw_loc_descr_ref last0, last1;
14803 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14804 ;
14805 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14806 ;
14807 if (CONST_INT_P (XEXP (rtl, 0)))
14808 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14809 /* deref_size zero extends, so no need to mask it again. */
14810 else if (last0->dw_loc_opc != DW_OP_deref_size
14811 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14812 {
14813 add_loc_descr (&op0, int_loc_descriptor (mask));
14814 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14815 }
14816 if (CONST_INT_P (XEXP (rtl, 1)))
14817 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14818 /* deref_size zero extends, so no need to mask it again. */
14819 else if (last1->dw_loc_opc != DW_OP_deref_size
14820 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14821 {
14822 add_loc_descr (&op1, int_loc_descriptor (mask));
14823 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14824 }
14825 }
14826 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14827 {
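/* Adding 2**(N-1) (modulo 2**N, with N the address size in bits) flips the
   sign bit of both operands, which maps unsigned order onto signed order,
   so the signed comparison emitted at the end gives the unsigned result.  */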
14828 HOST_WIDE_INT bias = 1;
14829 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14830 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14831 if (CONST_INT_P (XEXP (rtl, 1)))
14832 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14833 + INTVAL (XEXP (rtl, 1)));
14834 else
14835 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14836 bias, 0));
14837 }
14838 return compare_loc_descriptor (op, op0, op1);
14839 }
14840
14841 /* Return location descriptor for {U,S}{MIN,MAX}. */
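/* Schematically (illustrative), for SMIN in a DWARF2_ADDR_SIZE-wide mode the
   built expression is:
     <a> DW_OP_dup <b> DW_OP_swap DW_OP_over DW_OP_lt DW_OP_bra <L1>
     DW_OP_swap
     L1: DW_OP_drop
   which leaves the smaller of the two values on the stack.  */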
14842
14843 static dw_loc_descr_ref
14844 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14845 machine_mode mem_mode)
14846 {
14847 enum dwarf_location_atom op;
14848 dw_loc_descr_ref op0, op1, ret;
14849 dw_loc_descr_ref bra_node, drop_node;
14850
14851 scalar_int_mode int_mode;
14852 if (dwarf_strict
14853 && dwarf_version < 5
14854 && (!is_a <scalar_int_mode> (mode, &int_mode)
14855 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14856 return NULL;
14857
14858 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14859 VAR_INIT_STATUS_INITIALIZED);
14860 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14861 VAR_INIT_STATUS_INITIALIZED);
14862
14863 if (op0 == NULL || op1 == NULL)
14864 return NULL;
14865
14866 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14867 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14868 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14869 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14870 {
14871 /* Checked by the caller. */
14872 int_mode = as_a <scalar_int_mode> (mode);
14873 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14874 {
14875 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14876 add_loc_descr (&op0, int_loc_descriptor (mask));
14877 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14878 add_loc_descr (&op1, int_loc_descriptor (mask));
14879 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14880 }
14881 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14882 {
14883 HOST_WIDE_INT bias = 1;
14884 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14885 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14886 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14887 }
14888 }
14889 else if (is_a <scalar_int_mode> (mode, &int_mode)
14890 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14891 {
14892 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14893 add_loc_descr (&op0, int_loc_descriptor (shift));
14894 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14895 add_loc_descr (&op1, int_loc_descriptor (shift));
14896 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14897 }
14898 else if (is_a <scalar_int_mode> (mode, &int_mode)
14899 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14900 {
14901 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14902 dw_loc_descr_ref cvt;
14903 if (type_die == NULL)
14904 return NULL;
14905 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14906 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14907 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14908 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14909 add_loc_descr (&op0, cvt);
14910 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14911 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14912 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14913 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14914 add_loc_descr (&op1, cvt);
14915 }
14916
14917 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14918 op = DW_OP_lt;
14919 else
14920 op = DW_OP_gt;
14921 ret = op0;
14922 add_loc_descr (&ret, op1);
14923 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14924 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14925 add_loc_descr (&ret, bra_node);
14926 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14927 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14928 add_loc_descr (&ret, drop_node);
14929 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14930 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14931 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14932 && is_a <scalar_int_mode> (mode, &int_mode)
14933 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14934 ret = convert_descriptor_to_mode (int_mode, ret);
14935 return ret;
14936 }
14937
14938 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
14939 after converting both arguments to TYPE_DIE, then convert the result back
14940 to the unsigned type matching MODE (or to untyped, if MODE is not wider
14941 than DWARF2_ADDR_SIZE). */
14941
14942 static dw_loc_descr_ref
14943 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14944 scalar_int_mode mode, machine_mode mem_mode)
14945 {
14946 dw_loc_descr_ref cvt, op0, op1;
14947
14948 if (type_die == NULL)
14949 return NULL;
14950 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14951 VAR_INIT_STATUS_INITIALIZED);
14952 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14953 VAR_INIT_STATUS_INITIALIZED);
14954 if (op0 == NULL || op1 == NULL)
14955 return NULL;
14956 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14957 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14958 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14959 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14960 add_loc_descr (&op0, cvt);
14961 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14962 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14963 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14964 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14965 add_loc_descr (&op1, cvt);
14966 add_loc_descr (&op0, op1);
14967 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14968 return convert_descriptor_to_mode (mode, op0);
14969 }
14970
14971 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14972 const0 is DW_OP_lit0 or corresponding typed constant,
14973 const1 is DW_OP_lit1 or corresponding typed constant
14974 and constMSB is constant with just the MSB bit set
14975 for the mode):
14976 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14977 L1: const0 DW_OP_swap
14978 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14979 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14980 L3: DW_OP_drop
14981 L4: DW_OP_nop
14982
14983 CTZ is similar:
14984 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14985 L1: const0 DW_OP_swap
14986 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14987 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14988 L3: DW_OP_drop
14989 L4: DW_OP_nop
14990
14991 FFS is similar:
14992 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14993 L1: const1 DW_OP_swap
14994 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14995 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14996 L3: DW_OP_drop
14997 L4: DW_OP_nop */
14998
14999 static dw_loc_descr_ref
15000 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15001 machine_mode mem_mode)
15002 {
15003 dw_loc_descr_ref op0, ret, tmp;
15004 HOST_WIDE_INT valv;
15005 dw_loc_descr_ref l1jump, l1label;
15006 dw_loc_descr_ref l2jump, l2label;
15007 dw_loc_descr_ref l3jump, l3label;
15008 dw_loc_descr_ref l4jump, l4label;
15009 rtx msb;
15010
15011 if (GET_MODE (XEXP (rtl, 0)) != mode)
15012 return NULL;
15013
15014 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15015 VAR_INIT_STATUS_INITIALIZED);
15016 if (op0 == NULL)
15017 return NULL;
15018 ret = op0;
15019 if (GET_CODE (rtl) == CLZ)
15020 {
15021 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15022 valv = GET_MODE_BITSIZE (mode);
15023 }
15024 else if (GET_CODE (rtl) == FFS)
15025 valv = 0;
15026 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15027 valv = GET_MODE_BITSIZE (mode);
15028 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15029 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15030 add_loc_descr (&ret, l1jump);
15031 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15032 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15033 VAR_INIT_STATUS_INITIALIZED);
15034 if (tmp == NULL)
15035 return NULL;
15036 add_loc_descr (&ret, tmp);
15037 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15038 add_loc_descr (&ret, l4jump);
15039 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15040 ? const1_rtx : const0_rtx,
15041 mode, mem_mode,
15042 VAR_INIT_STATUS_INITIALIZED);
15043 if (l1label == NULL)
15044 return NULL;
15045 add_loc_descr (&ret, l1label);
15046 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15047 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15048 add_loc_descr (&ret, l2label);
15049 if (GET_CODE (rtl) != CLZ)
15050 msb = const1_rtx;
15051 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15052 msb = GEN_INT (HOST_WIDE_INT_1U
15053 << (GET_MODE_BITSIZE (mode) - 1));
15054 else
15055 msb = immed_wide_int_const
15056 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15057 GET_MODE_PRECISION (mode)), mode);
15058 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15059 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15060 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15061 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15062 else
15063 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15064 VAR_INIT_STATUS_INITIALIZED);
15065 if (tmp == NULL)
15066 return NULL;
15067 add_loc_descr (&ret, tmp);
15068 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15069 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15070 add_loc_descr (&ret, l3jump);
15071 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15072 VAR_INIT_STATUS_INITIALIZED);
15073 if (tmp == NULL)
15074 return NULL;
15075 add_loc_descr (&ret, tmp);
15076 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15077 ? DW_OP_shl : DW_OP_shr, 0, 0));
15078 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15079 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15080 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15081 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15082 add_loc_descr (&ret, l2jump);
15083 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15084 add_loc_descr (&ret, l3label);
15085 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15086 add_loc_descr (&ret, l4label);
15087 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15088 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15089 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15090 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15091 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15092 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15093 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15094 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15095 return ret;
15096 }
15097
15098 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15099 const1 is DW_OP_lit1 or corresponding typed constant):
15100 const0 DW_OP_swap
15101 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15102 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15103 L2: DW_OP_drop
15104
15105 PARITY is similar:
15106 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15107 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15108 L2: DW_OP_drop */
15109
15110 static dw_loc_descr_ref
15111 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15112 machine_mode mem_mode)
15113 {
15114 dw_loc_descr_ref op0, ret, tmp;
15115 dw_loc_descr_ref l1jump, l1label;
15116 dw_loc_descr_ref l2jump, l2label;
15117
15118 if (GET_MODE (XEXP (rtl, 0)) != mode)
15119 return NULL;
15120
15121 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15122 VAR_INIT_STATUS_INITIALIZED);
15123 if (op0 == NULL)
15124 return NULL;
15125 ret = op0;
15126 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15127 VAR_INIT_STATUS_INITIALIZED);
15128 if (tmp == NULL)
15129 return NULL;
15130 add_loc_descr (&ret, tmp);
15131 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15132 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15133 add_loc_descr (&ret, l1label);
15134 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15135 add_loc_descr (&ret, l2jump);
15136 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15137 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15138 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15139 VAR_INIT_STATUS_INITIALIZED);
15140 if (tmp == NULL)
15141 return NULL;
15142 add_loc_descr (&ret, tmp);
15143 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15144 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15145 ? DW_OP_plus : DW_OP_xor, 0, 0));
15146 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15147 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15148 VAR_INIT_STATUS_INITIALIZED);
15149 add_loc_descr (&ret, tmp);
15150 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15151 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15152 add_loc_descr (&ret, l1jump);
15153 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15154 add_loc_descr (&ret, l2label);
15155 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15156 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15157 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15158 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15159 return ret;
15160 }
15161
15162 /* BSWAP (constS is initial shift count, either 56 or 24):
15163 constS const0
15164 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15165 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15166 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15167 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15168 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15169
15170 static dw_loc_descr_ref
15171 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15172 machine_mode mem_mode)
15173 {
15174 dw_loc_descr_ref op0, ret, tmp;
15175 dw_loc_descr_ref l1jump, l1label;
15176 dw_loc_descr_ref l2jump, l2label;
15177
15178 if (BITS_PER_UNIT != 8
15179 || (GET_MODE_BITSIZE (mode) != 32
15180 && GET_MODE_BITSIZE (mode) != 64))
15181 return NULL;
15182
15183 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15184 VAR_INIT_STATUS_INITIALIZED);
15185 if (op0 == NULL)
15186 return NULL;
15187
15188 ret = op0;
15189 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15190 mode, mem_mode,
15191 VAR_INIT_STATUS_INITIALIZED);
15192 if (tmp == NULL)
15193 return NULL;
15194 add_loc_descr (&ret, tmp);
15195 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15196 VAR_INIT_STATUS_INITIALIZED);
15197 if (tmp == NULL)
15198 return NULL;
15199 add_loc_descr (&ret, tmp);
15200 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15201 add_loc_descr (&ret, l1label);
15202 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15203 mode, mem_mode,
15204 VAR_INIT_STATUS_INITIALIZED);
15205 add_loc_descr (&ret, tmp);
15206 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15207 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15208 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15209 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15210 VAR_INIT_STATUS_INITIALIZED);
15211 if (tmp == NULL)
15212 return NULL;
15213 add_loc_descr (&ret, tmp);
15214 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15215 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15216 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15217 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15218 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15219 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15220 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15221 VAR_INIT_STATUS_INITIALIZED);
15222 add_loc_descr (&ret, tmp);
15223 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15224 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15225 add_loc_descr (&ret, l2jump);
15226 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15227 VAR_INIT_STATUS_INITIALIZED);
15228 add_loc_descr (&ret, tmp);
15229 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15230 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15231 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15232 add_loc_descr (&ret, l1jump);
15233 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15234 add_loc_descr (&ret, l2label);
15235 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15236 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15237 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15238 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15239 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15240 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15241 return ret;
15242 }
15243
15244 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15245 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15246 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15247 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15248
15249 ROTATERT is similar:
15250 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15251 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15252 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15253
15254 static dw_loc_descr_ref
15255 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15256 machine_mode mem_mode)
15257 {
15258 rtx rtlop1 = XEXP (rtl, 1);
15259 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15260 int i;
15261
15262 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15263 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15264 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15265 VAR_INIT_STATUS_INITIALIZED);
15266 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15267 VAR_INIT_STATUS_INITIALIZED);
15268 if (op0 == NULL || op1 == NULL)
15269 return NULL;
15270 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15271 for (i = 0; i < 2; i++)
15272 {
15273 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15274 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15275 mode, mem_mode,
15276 VAR_INIT_STATUS_INITIALIZED);
15277 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15278 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15279 ? DW_OP_const4u
15280 : HOST_BITS_PER_WIDE_INT == 64
15281 ? DW_OP_const8u : DW_OP_constu,
15282 GET_MODE_MASK (mode), 0);
15283 else
15284 mask[i] = NULL;
15285 if (mask[i] == NULL)
15286 return NULL;
15287 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15288 }
15289 ret = op0;
15290 add_loc_descr (&ret, op1);
15291 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15292 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15293 if (GET_CODE (rtl) == ROTATERT)
15294 {
15295 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15296 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15297 GET_MODE_BITSIZE (mode), 0));
15298 }
15299 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15300 if (mask[0] != NULL)
15301 add_loc_descr (&ret, mask[0]);
15302 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15303 if (mask[1] != NULL)
15304 {
15305 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15306 add_loc_descr (&ret, mask[1]);
15307 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15308 }
15309 if (GET_CODE (rtl) == ROTATE)
15310 {
15311 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15312 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15313 GET_MODE_BITSIZE (mode), 0));
15314 }
15315 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15316 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15317 return ret;
15318 }
15319
15320 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15321 for DEBUG_PARAMETER_REF RTL. */
15322
15323 static dw_loc_descr_ref
15324 parameter_ref_descriptor (rtx rtl)
15325 {
15326 dw_loc_descr_ref ret;
15327 dw_die_ref ref;
15328
15329 if (dwarf_strict)
15330 return NULL;
15331 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15332 /* With LTO during LTRANS we get the late DIE that refers to the early
15333 DIE, thus we add another indirection here. This seems to confuse
15334 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15335 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15336 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15337 if (ref)
15338 {
15339 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15340 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15341 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15342 }
15343 else
15344 {
15345 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15346 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15347 }
15348 return ret;
15349 }
15350
15351 /* The following routine converts the RTL for a variable or parameter
15352 (resident in memory) into an equivalent Dwarf representation of a
15353 mechanism for getting the address of that same variable onto the top of a
15354 hypothetical "address evaluation" stack.
15355
15356 When creating memory location descriptors, we are effectively transforming
15357 the RTL for a memory-resident object into its Dwarf postfix expression
15358 equivalent. This routine recursively descends an RTL tree, turning
15359 it into Dwarf postfix code as it goes.
15360
15361 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15362
15363 MEM_MODE is the mode of the memory reference, needed to handle some
15364 autoincrement addressing modes.
15365
15366 Return 0 if we can't represent the location. */
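/* For instance (illustrative): the address (plus (reg fp) (const_int -8))
   typically becomes DW_OP_fbreg <-8 after the frame-base adjustment>, and a
   (symbol_ref X) becomes DW_OP_addr <X>; the exact opcodes depend on the
   target, the frame base in effect and the mode involved.  */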
15367
15368 dw_loc_descr_ref
15369 mem_loc_descriptor (rtx rtl, machine_mode mode,
15370 machine_mode mem_mode,
15371 enum var_init_status initialized)
15372 {
15373 dw_loc_descr_ref mem_loc_result = NULL;
15374 enum dwarf_location_atom op;
15375 dw_loc_descr_ref op0, op1;
15376 rtx inner = NULL_RTX;
15377 poly_int64 offset;
15378
15379 if (mode == VOIDmode)
15380 mode = GET_MODE (rtl);
15381
15382 /* Note that for a dynamically sized array, the location we will generate a
15383 description of here will be the lowest numbered location which is
15384 actually within the array. That's *not* necessarily the same as the
15385 zeroth element of the array. */
15386
15387 rtl = targetm.delegitimize_address (rtl);
15388
15389 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15390 return NULL;
15391
15392 scalar_int_mode int_mode, inner_mode, op1_mode;
15393 switch (GET_CODE (rtl))
15394 {
15395 case POST_INC:
15396 case POST_DEC:
15397 case POST_MODIFY:
15398 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15399
15400 case SUBREG:
15401 /* The case of a subreg may arise when we have a local (register)
15402 variable or a formal (register) parameter which doesn't quite fill
15403 up an entire register. For now, just assume that it is
15404 legitimate to make the Dwarf info refer to the whole register which
15405 contains the given subreg. */
15406 if (!subreg_lowpart_p (rtl))
15407 break;
15408 inner = SUBREG_REG (rtl);
15409 /* FALLTHRU */
15410 case TRUNCATE:
15411 if (inner == NULL_RTX)
15412 inner = XEXP (rtl, 0);
15413 if (is_a <scalar_int_mode> (mode, &int_mode)
15414 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15415 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15416 #ifdef POINTERS_EXTEND_UNSIGNED
15417 || (int_mode == Pmode && mem_mode != VOIDmode)
15418 #endif
15419 )
15420 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15421 {
15422 mem_loc_result = mem_loc_descriptor (inner,
15423 inner_mode,
15424 mem_mode, initialized);
15425 break;
15426 }
15427 if (dwarf_strict && dwarf_version < 5)
15428 break;
15429 if (is_a <scalar_int_mode> (mode, &int_mode)
15430 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15431 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15432 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15433 {
15434 dw_die_ref type_die;
15435 dw_loc_descr_ref cvt;
15436
15437 mem_loc_result = mem_loc_descriptor (inner,
15438 GET_MODE (inner),
15439 mem_mode, initialized);
15440 if (mem_loc_result == NULL)
15441 break;
15442 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15443 if (type_die == NULL)
15444 {
15445 mem_loc_result = NULL;
15446 break;
15447 }
15448 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15449 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15450 else
15451 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15452 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15453 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15454 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15455 add_loc_descr (&mem_loc_result, cvt);
15456 if (is_a <scalar_int_mode> (mode, &int_mode)
15457 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15458 {
15459 /* Convert it to untyped afterwards. */
15460 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15461 add_loc_descr (&mem_loc_result, cvt);
15462 }
15463 }
15464 break;
15465
15466 case REG:
15467 if (!is_a <scalar_int_mode> (mode, &int_mode)
15468 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15469 && rtl != arg_pointer_rtx
15470 && rtl != frame_pointer_rtx
15471 #ifdef POINTERS_EXTEND_UNSIGNED
15472 && (int_mode != Pmode || mem_mode == VOIDmode)
15473 #endif
15474 ))
15475 {
15476 dw_die_ref type_die;
15477 unsigned int dbx_regnum;
15478
15479 if (dwarf_strict && dwarf_version < 5)
15480 break;
15481 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
15482 break;
15483 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15484 if (type_die == NULL)
15485 break;
15486
15487 dbx_regnum = dbx_reg_number (rtl);
15488 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15489 break;
15490 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15491 dbx_regnum, 0);
15492 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15493 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15494 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15495 break;
15496 }
15497 /* Whenever a register number forms a part of the description of the
15498 method for calculating the (dynamic) address of a memory resident
15499 object, DWARF rules require the register number be referred to as
15500 a "base register". This distinction is not based in any way upon
15501 what category of register the hardware believes the given register
15502 belongs to. This is strictly DWARF terminology we're dealing with
15503 here. Note that in cases where the location of a memory-resident
15504 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15505 OP_CONST (0)) the actual DWARF location descriptor that we generate
15506 may just be OP_BASEREG (basereg). This may look deceptively like
15507 the object in question was allocated to a register (rather than in
15508 memory) so DWARF consumers need to be aware of the subtle
15509 distinction between OP_REG and OP_BASEREG. */
15510 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15511 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15512 else if (stack_realign_drap
15513 && crtl->drap_reg
15514 && crtl->args.internal_arg_pointer == rtl
15515 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15516 {
15517 /* If RTL is internal_arg_pointer, which has been optimized
15518 out, use DRAP instead. */
15519 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15520 VAR_INIT_STATUS_INITIALIZED);
15521 }
15522 break;
15523
15524 case SIGN_EXTEND:
15525 case ZERO_EXTEND:
15526 if (!is_a <scalar_int_mode> (mode, &int_mode)
15527 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15528 break;
15529 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15530 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15531 if (op0 == 0)
15532 break;
15533 else if (GET_CODE (rtl) == ZERO_EXTEND
15534 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15535 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15536 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15537 to expand zero extend as two shifts instead of
15538 masking. */
15539 && GET_MODE_SIZE (inner_mode) <= 4)
15540 {
15541 mem_loc_result = op0;
15542 add_loc_descr (&mem_loc_result,
15543 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15544 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15545 }
15546 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15547 {
15548 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15549 shift *= BITS_PER_UNIT;
15550 if (GET_CODE (rtl) == SIGN_EXTEND)
15551 op = DW_OP_shra;
15552 else
15553 op = DW_OP_shr;
15554 mem_loc_result = op0;
15555 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15556 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15557 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15558 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15559 }
15560 else if (!dwarf_strict || dwarf_version >= 5)
15561 {
15562 dw_die_ref type_die1, type_die2;
15563 dw_loc_descr_ref cvt;
15564
15565 type_die1 = base_type_for_mode (inner_mode,
15566 GET_CODE (rtl) == ZERO_EXTEND);
15567 if (type_die1 == NULL)
15568 break;
15569 type_die2 = base_type_for_mode (int_mode, 1);
15570 if (type_die2 == NULL)
15571 break;
15572 mem_loc_result = op0;
15573 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15574 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15575 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15576 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15577 add_loc_descr (&mem_loc_result, cvt);
15578 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15579 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15580 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15581 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15582 add_loc_descr (&mem_loc_result, cvt);
15583 }
15584 break;
15585
15586 case MEM:
15587 {
15588 rtx new_rtl = avoid_constant_pool_reference (rtl);
15589 if (new_rtl != rtl)
15590 {
15591 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15592 initialized);
15593 if (mem_loc_result != NULL)
15594 return mem_loc_result;
15595 }
15596 }
15597 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15598 get_address_mode (rtl), mode,
15599 VAR_INIT_STATUS_INITIALIZED);
15600 if (mem_loc_result == NULL)
15601 mem_loc_result = tls_mem_loc_descriptor (rtl);
15602 if (mem_loc_result != NULL)
15603 {
15604 if (!is_a <scalar_int_mode> (mode, &int_mode)
15605 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15606 {
15607 dw_die_ref type_die;
15608 dw_loc_descr_ref deref;
15609 HOST_WIDE_INT size;
15610
15611 if (dwarf_strict && dwarf_version < 5)
15612 return NULL;
15613 if (!GET_MODE_SIZE (mode).is_constant (&size))
15614 return NULL;
15615 type_die
15616 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15617 if (type_die == NULL)
15618 return NULL;
15619 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15620 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15621 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15622 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15623 add_loc_descr (&mem_loc_result, deref);
15624 }
15625 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15626 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15627 else
15628 add_loc_descr (&mem_loc_result,
15629 new_loc_descr (DW_OP_deref_size,
15630 GET_MODE_SIZE (int_mode), 0));
15631 }
15632 break;
15633
15634 case LO_SUM:
15635 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15636
15637 case LABEL_REF:
15638 /* Some ports can transform a symbol ref into a label ref, because
15639 the symbol ref is too far away and has to be dumped into a constant
15640 pool. */
15641 case CONST:
15642 case SYMBOL_REF:
15643 if (!is_a <scalar_int_mode> (mode, &int_mode)
15644 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15645 #ifdef POINTERS_EXTEND_UNSIGNED
15646 && (int_mode != Pmode || mem_mode == VOIDmode)
15647 #endif
15648 ))
15649 break;
15650 if (GET_CODE (rtl) == SYMBOL_REF
15651 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15652 {
15653 dw_loc_descr_ref temp;
15654
15655 /* If this is not defined, we have no way to emit the data. */
15656 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15657 break;
15658
15659 temp = new_addr_loc_descr (rtl, dtprel_true);
15660
15661 /* We check for DWARF 5 here because gdb did not implement
15662 DW_OP_form_tls_address until after 7.12. */
15663 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15664 ? DW_OP_form_tls_address
15665 : DW_OP_GNU_push_tls_address),
15666 0, 0);
15667 add_loc_descr (&mem_loc_result, temp);
15668
15669 break;
15670 }
15671
15672 if (!const_ok_for_output (rtl))
15673 {
15674 if (GET_CODE (rtl) == CONST)
15675 switch (GET_CODE (XEXP (rtl, 0)))
15676 {
15677 case NOT:
15678 op = DW_OP_not;
15679 goto try_const_unop;
15680 case NEG:
15681 op = DW_OP_neg;
15682 goto try_const_unop;
15683 try_const_unop:
15684 rtx arg;
15685 arg = XEXP (XEXP (rtl, 0), 0);
15686 if (!CONSTANT_P (arg))
15687 arg = gen_rtx_CONST (int_mode, arg);
15688 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15689 initialized);
15690 if (op0)
15691 {
15692 mem_loc_result = op0;
15693 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15694 }
15695 break;
15696 default:
15697 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15698 mem_mode, initialized);
15699 break;
15700 }
15701 break;
15702 }
15703
15704 symref:
15705 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15706 vec_safe_push (used_rtx_array, rtl);
15707 break;
15708
15709 case CONCAT:
15710 case CONCATN:
15711 case VAR_LOCATION:
15712 case DEBUG_IMPLICIT_PTR:
15713 expansion_failed (NULL_TREE, rtl,
15714 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15715 return 0;
15716
15717 case ENTRY_VALUE:
15718 if (dwarf_strict && dwarf_version < 5)
15719 return NULL;
15720 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15721 {
15722 if (!is_a <scalar_int_mode> (mode, &int_mode)
15723 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15724 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15725 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15726 else
15727 {
15728 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15729 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15730 return NULL;
15731 op0 = one_reg_loc_descriptor (dbx_regnum,
15732 VAR_INIT_STATUS_INITIALIZED);
15733 }
15734 }
15735 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15736 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15737 {
15738 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15739 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15740 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15741 return NULL;
15742 }
15743 else
15744 gcc_unreachable ();
15745 if (op0 == NULL)
15746 return NULL;
15747 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15748 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15749 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15750 break;
15751
15752 case DEBUG_PARAMETER_REF:
15753 mem_loc_result = parameter_ref_descriptor (rtl);
15754 break;
15755
15756 case PRE_MODIFY:
15757 /* Extract the PLUS expression nested inside and fall into
15758 PLUS code below. */
15759 rtl = XEXP (rtl, 1);
15760 goto plus;
15761
15762 case PRE_INC:
15763 case PRE_DEC:
15764 /* Turn these into a PLUS expression and fall into the PLUS code
15765 below. */
15766 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15767 gen_int_mode (GET_CODE (rtl) == PRE_INC
15768 ? GET_MODE_UNIT_SIZE (mem_mode)
15769 : -GET_MODE_UNIT_SIZE (mem_mode),
15770 mode));
15771
15772 /* fall through */
15773
15774 case PLUS:
15775 plus:
15776 if (is_based_loc (rtl)
15777 && is_a <scalar_int_mode> (mode, &int_mode)
15778 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15779 || XEXP (rtl, 0) == arg_pointer_rtx
15780 || XEXP (rtl, 0) == frame_pointer_rtx))
15781 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15782 INTVAL (XEXP (rtl, 1)),
15783 VAR_INIT_STATUS_INITIALIZED);
15784 else
15785 {
15786 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15787 VAR_INIT_STATUS_INITIALIZED);
15788 if (mem_loc_result == 0)
15789 break;
15790
15791 if (CONST_INT_P (XEXP (rtl, 1))
15792 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15793 <= DWARF2_ADDR_SIZE))
15794 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15795 else
15796 {
15797 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15798 VAR_INIT_STATUS_INITIALIZED);
15799 if (op1 == 0)
15800 return NULL;
15801 add_loc_descr (&mem_loc_result, op1);
15802 add_loc_descr (&mem_loc_result,
15803 new_loc_descr (DW_OP_plus, 0, 0));
15804 }
15805 }
15806 break;
15807
15808 /* If a pseudo-reg is optimized away, it is possible for it to
15809 be replaced with a MEM containing a multiply or shift. */
15810 case MINUS:
15811 op = DW_OP_minus;
15812 goto do_binop;
15813
15814 case MULT:
15815 op = DW_OP_mul;
15816 goto do_binop;
15817
15818 case DIV:
15819 if ((!dwarf_strict || dwarf_version >= 5)
15820 && is_a <scalar_int_mode> (mode, &int_mode)
15821 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15822 {
15823 mem_loc_result = typed_binop (DW_OP_div, rtl,
15824 base_type_for_mode (mode, 0),
15825 int_mode, mem_mode);
15826 break;
15827 }
15828 op = DW_OP_div;
15829 goto do_binop;
15830
15831 case UMOD:
15832 op = DW_OP_mod;
15833 goto do_binop;
15834
15835 case ASHIFT:
15836 op = DW_OP_shl;
15837 goto do_shift;
15838
15839 case ASHIFTRT:
15840 op = DW_OP_shra;
15841 goto do_shift;
15842
15843 case LSHIFTRT:
15844 op = DW_OP_shr;
15845 goto do_shift;
15846
15847 do_shift:
15848 if (!is_a <scalar_int_mode> (mode, &int_mode))
15849 break;
15850 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15851 VAR_INIT_STATUS_INITIALIZED);
15852 {
15853 rtx rtlop1 = XEXP (rtl, 1);
15854 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15855 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15856 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15857 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15858 VAR_INIT_STATUS_INITIALIZED);
15859 }
15860
15861 if (op0 == 0 || op1 == 0)
15862 break;
15863
15864 mem_loc_result = op0;
15865 add_loc_descr (&mem_loc_result, op1);
15866 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15867 break;
15868
15869 case AND:
15870 op = DW_OP_and;
15871 goto do_binop;
15872
15873 case IOR:
15874 op = DW_OP_or;
15875 goto do_binop;
15876
15877 case XOR:
15878 op = DW_OP_xor;
15879 goto do_binop;
15880
15881 do_binop:
15882 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15883 VAR_INIT_STATUS_INITIALIZED);
15884 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15885 VAR_INIT_STATUS_INITIALIZED);
15886
15887 if (op0 == 0 || op1 == 0)
15888 break;
15889
15890 mem_loc_result = op0;
15891 add_loc_descr (&mem_loc_result, op1);
15892 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15893 break;
15894
15895 case MOD:
15896 if ((!dwarf_strict || dwarf_version >= 5)
15897 && is_a <scalar_int_mode> (mode, &int_mode)
15898 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15899 {
15900 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15901 base_type_for_mode (mode, 0),
15902 int_mode, mem_mode);
15903 break;
15904 }
15905
15906 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15907 VAR_INIT_STATUS_INITIALIZED);
15908 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15909 VAR_INIT_STATUS_INITIALIZED);
15910
15911 if (op0 == 0 || op1 == 0)
15912 break;
15913
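/* Compute the remainder as op0 - (op0 / op1) * op1.  The sequence
   op0 op1 DW_OP_over DW_OP_over DW_OP_div DW_OP_mul DW_OP_minus
   leaves exactly that value on top of the DWARF stack.  */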
15914 mem_loc_result = op0;
15915 add_loc_descr (&mem_loc_result, op1);
15916 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15917 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15918 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15919 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15920 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15921 break;
15922
15923 case UDIV:
15924 if ((!dwarf_strict || dwarf_version >= 5)
15925 && is_a <scalar_int_mode> (mode, &int_mode))
15926 {
15927 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15928 {
15929 op = DW_OP_div;
15930 goto do_binop;
15931 }
15932 mem_loc_result = typed_binop (DW_OP_div, rtl,
15933 base_type_for_mode (int_mode, 1),
15934 int_mode, mem_mode);
15935 }
15936 break;
15937
15938 case NOT:
15939 op = DW_OP_not;
15940 goto do_unop;
15941
15942 case ABS:
15943 op = DW_OP_abs;
15944 goto do_unop;
15945
15946 case NEG:
15947 op = DW_OP_neg;
15948 goto do_unop;
15949
15950 do_unop:
15951 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15952 VAR_INIT_STATUS_INITIALIZED);
15953
15954 if (op0 == 0)
15955 break;
15956
15957 mem_loc_result = op0;
15958 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15959 break;
15960
15961 case CONST_INT:
15962 if (!is_a <scalar_int_mode> (mode, &int_mode)
15963 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15964 #ifdef POINTERS_EXTEND_UNSIGNED
15965 || (int_mode == Pmode
15966 && mem_mode != VOIDmode
15967 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15968 #endif
15969 )
15970 {
15971 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15972 break;
15973 }
15974 if ((!dwarf_strict || dwarf_version >= 5)
15975 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15976 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15977 {
15978 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15979 scalar_int_mode amode;
15980 if (type_die == NULL)
15981 return NULL;
15982 if (INTVAL (rtl) >= 0
15983 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15984 .exists (&amode))
15985 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15986 /* const DW_OP_convert <XXX> vs.
15987 DW_OP_const_type <XXX, 1, const>. */
15988 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15989 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15990 {
15991 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15992 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15993 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15994 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15995 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15996 add_loc_descr (&mem_loc_result, op0);
15997 return mem_loc_result;
15998 }
15999 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16000 INTVAL (rtl));
16001 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16002 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16003 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16004 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16005 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16006 else
16007 {
16008 mem_loc_result->dw_loc_oprnd2.val_class
16009 = dw_val_class_const_double;
16010 mem_loc_result->dw_loc_oprnd2.v.val_double
16011 = double_int::from_shwi (INTVAL (rtl));
16012 }
16013 }
16014 break;
16015
16016 case CONST_DOUBLE:
16017 if (!dwarf_strict || dwarf_version >= 5)
16018 {
16019 dw_die_ref type_die;
16020
16021 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16022 CONST_DOUBLE rtx could represent either a large integer
16023 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16024 the value is always a floating point constant.
16025
16026 When it is an integer, a CONST_DOUBLE is used whenever
16027 the constant requires 2 HWIs to be adequately represented.
16028 We output CONST_DOUBLEs as blocks. */
16029 if (mode == VOIDmode
16030 || (GET_MODE (rtl) == VOIDmode
16031 && maybe_ne (GET_MODE_BITSIZE (mode),
16032 HOST_BITS_PER_DOUBLE_INT)))
16033 break;
16034 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16035 if (type_die == NULL)
16036 return NULL;
16037 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16038 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16039 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16040 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16041 #if TARGET_SUPPORTS_WIDE_INT == 0
16042 if (!SCALAR_FLOAT_MODE_P (mode))
16043 {
16044 mem_loc_result->dw_loc_oprnd2.val_class
16045 = dw_val_class_const_double;
16046 mem_loc_result->dw_loc_oprnd2.v.val_double
16047 = rtx_to_double_int (rtl);
16048 }
16049 else
16050 #endif
16051 {
16052 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16053 unsigned int length = GET_MODE_SIZE (float_mode);
16054 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16055
16056 insert_float (rtl, array);
16057 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16058 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16059 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16060 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16061 }
16062 }
16063 break;
16064
16065 case CONST_WIDE_INT:
16066 if (!dwarf_strict || dwarf_version >= 5)
16067 {
16068 dw_die_ref type_die;
16069
16070 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16071 if (type_die == NULL)
16072 return NULL;
16073 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16074 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16075 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16076 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16077 mem_loc_result->dw_loc_oprnd2.val_class
16078 = dw_val_class_wide_int;
16079 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16080 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16081 }
16082 break;
16083
16084 case CONST_POLY_INT:
16085 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16086 break;
16087
16088 case EQ:
16089 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16090 break;
16091
16092 case GE:
16093 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16094 break;
16095
16096 case GT:
16097 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16098 break;
16099
16100 case LE:
16101 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16102 break;
16103
16104 case LT:
16105 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16106 break;
16107
16108 case NE:
16109 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16110 break;
16111
16112 case GEU:
16113 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16114 break;
16115
16116 case GTU:
16117 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16118 break;
16119
16120 case LEU:
16121 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16122 break;
16123
16124 case LTU:
16125 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16126 break;
16127
16128 case UMIN:
16129 case UMAX:
16130 if (!SCALAR_INT_MODE_P (mode))
16131 break;
16132 /* FALLTHRU */
16133 case SMIN:
16134 case SMAX:
16135 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16136 break;
16137
16138 case ZERO_EXTRACT:
16139 case SIGN_EXTRACT:
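/* Extract the field by shifting it to the most significant bits of an
   address-sized value with DW_OP_shl and then shifting it back down,
   using DW_OP_shra for SIGN_EXTRACT and DW_OP_shr for ZERO_EXTRACT so
   that the result is sign- or zero-extended as appropriate.  */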
16140 if (CONST_INT_P (XEXP (rtl, 1))
16141 && CONST_INT_P (XEXP (rtl, 2))
16142 && is_a <scalar_int_mode> (mode, &int_mode)
16143 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16144 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16145 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16146 && ((unsigned) INTVAL (XEXP (rtl, 1))
16147 + (unsigned) INTVAL (XEXP (rtl, 2))
16148 <= GET_MODE_BITSIZE (int_mode)))
16149 {
16150 int shift, size;
16151 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16152 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16153 if (op0 == 0)
16154 break;
16155 if (GET_CODE (rtl) == SIGN_EXTRACT)
16156 op = DW_OP_shra;
16157 else
16158 op = DW_OP_shr;
16159 mem_loc_result = op0;
16160 size = INTVAL (XEXP (rtl, 1));
16161 shift = INTVAL (XEXP (rtl, 2));
16162 if (BITS_BIG_ENDIAN)
16163 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16164 if (shift + size != (int) DWARF2_ADDR_SIZE)
16165 {
16166 add_loc_descr (&mem_loc_result,
16167 int_loc_descriptor (DWARF2_ADDR_SIZE
16168 - shift - size));
16169 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16170 }
16171 if (size != (int) DWARF2_ADDR_SIZE)
16172 {
16173 add_loc_descr (&mem_loc_result,
16174 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16175 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16176 }
16177 }
16178 break;
16179
16180 case IF_THEN_ELSE:
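/* Emit "op1 op2 op0 DW_OP_bra DW_OP_swap DW_OP_drop", where DW_OP_bra
   branches to the DW_OP_drop when the condition op0 is nonzero: the drop
   then discards op2 and leaves op1 on the stack, while a zero condition
   falls through the DW_OP_swap so that the drop discards op1 and leaves
   op2.  */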
16181 {
16182 dw_loc_descr_ref op2, bra_node, drop_node;
16183 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16184 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16185 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16186 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16187 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16188 VAR_INIT_STATUS_INITIALIZED);
16189 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16190 VAR_INIT_STATUS_INITIALIZED);
16191 if (op0 == NULL || op1 == NULL || op2 == NULL)
16192 break;
16193
16194 mem_loc_result = op1;
16195 add_loc_descr (&mem_loc_result, op2);
16196 add_loc_descr (&mem_loc_result, op0);
16197 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16198 add_loc_descr (&mem_loc_result, bra_node);
16199 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16200 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16201 add_loc_descr (&mem_loc_result, drop_node);
16202 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16203 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16204 }
16205 break;
16206
16207 case FLOAT_EXTEND:
16208 case FLOAT_TRUNCATE:
16209 case FLOAT:
16210 case UNSIGNED_FLOAT:
16211 case FIX:
16212 case UNSIGNED_FIX:
16213 if (!dwarf_strict || dwarf_version >= 5)
16214 {
16215 dw_die_ref type_die;
16216 dw_loc_descr_ref cvt;
16217
16218 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16219 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16220 if (op0 == NULL)
16221 break;
16222 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16223 && (GET_CODE (rtl) == FLOAT
16224 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16225 {
16226 type_die = base_type_for_mode (int_mode,
16227 GET_CODE (rtl) == UNSIGNED_FLOAT);
16228 if (type_die == NULL)
16229 break;
16230 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16231 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16232 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16233 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16234 add_loc_descr (&op0, cvt);
16235 }
16236 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16237 if (type_die == NULL)
16238 break;
16239 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16240 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16241 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16242 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16243 add_loc_descr (&op0, cvt);
16244 if (is_a <scalar_int_mode> (mode, &int_mode)
16245 && (GET_CODE (rtl) == FIX
16246 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16247 {
16248 op0 = convert_descriptor_to_mode (int_mode, op0);
16249 if (op0 == NULL)
16250 break;
16251 }
16252 mem_loc_result = op0;
16253 }
16254 break;
16255
16256 case CLZ:
16257 case CTZ:
16258 case FFS:
16259 if (is_a <scalar_int_mode> (mode, &int_mode))
16260 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16261 break;
16262
16263 case POPCOUNT:
16264 case PARITY:
16265 if (is_a <scalar_int_mode> (mode, &int_mode))
16266 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16267 break;
16268
16269 case BSWAP:
16270 if (is_a <scalar_int_mode> (mode, &int_mode))
16271 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16272 break;
16273
16274 case ROTATE:
16275 case ROTATERT:
16276 if (is_a <scalar_int_mode> (mode, &int_mode))
16277 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16278 break;
16279
16280 case COMPARE:
16281 /* In theory, we could implement the above. */
16282 /* DWARF cannot represent the unsigned compare operations
16283 natively. */
16284 case SS_MULT:
16285 case US_MULT:
16286 case SS_DIV:
16287 case US_DIV:
16288 case SS_PLUS:
16289 case US_PLUS:
16290 case SS_MINUS:
16291 case US_MINUS:
16292 case SS_NEG:
16293 case US_NEG:
16294 case SS_ABS:
16295 case SS_ASHIFT:
16296 case US_ASHIFT:
16297 case SS_TRUNCATE:
16298 case US_TRUNCATE:
16299 case UNORDERED:
16300 case ORDERED:
16301 case UNEQ:
16302 case UNGE:
16303 case UNGT:
16304 case UNLE:
16305 case UNLT:
16306 case LTGT:
16307 case FRACT_CONVERT:
16308 case UNSIGNED_FRACT_CONVERT:
16309 case SAT_FRACT:
16310 case UNSIGNED_SAT_FRACT:
16311 case SQRT:
16312 case ASM_OPERANDS:
16313 case VEC_MERGE:
16314 case VEC_SELECT:
16315 case VEC_CONCAT:
16316 case VEC_DUPLICATE:
16317 case VEC_SERIES:
16318 case UNSPEC:
16319 case HIGH:
16320 case FMA:
16321 case STRICT_LOW_PART:
16322 case CONST_VECTOR:
16323 case CONST_FIXED:
16324 case CLRSB:
16325 case CLOBBER:
16326 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16327 can't express it in the debug info. This can happen e.g. with some
16328 TLS UNSPECs. */
16329 break;
16330
16331 case CONST_STRING:
16332 resolve_one_addr (&rtl);
16333 goto symref;
16334
16335 /* RTL sequences inside a PARALLEL record a series of DWARF operations for
16336 the expression. An UNSPEC rtx represents a raw DWARF operation;
16337 new_loc_descr is called for it to build the operation directly.
16338 Otherwise mem_loc_descriptor is called recursively. */
16339 case PARALLEL:
16340 {
16341 int index = 0;
16342 dw_loc_descr_ref exp_result = NULL;
16343
16344 for (; index < XVECLEN (rtl, 0); index++)
16345 {
16346 rtx elem = XVECEXP (rtl, 0, index);
16347 if (GET_CODE (elem) == UNSPEC)
16348 {
16349 /* Each DWARF operation UNSPEC contains two operands; if
16350 one operand is not used for the operation, const0_rtx is
16351 passed. */
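/* For example, an element such as (unspec [(const_int 4) (const_int 0)] N),
   where N is the DWARF opcode recorded in the UNSPEC number, becomes a
   single location operation with operands 4 and 0.  */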
16352 gcc_assert (XVECLEN (elem, 0) == 2);
16353
16354 HOST_WIDE_INT dw_op = XINT (elem, 1);
16355 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16356 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16357 exp_result
16358 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16359 oprnd2);
16360 }
16361 else
16362 exp_result
16363 = mem_loc_descriptor (elem, mode, mem_mode,
16364 VAR_INIT_STATUS_INITIALIZED);
16365
16366 if (!mem_loc_result)
16367 mem_loc_result = exp_result;
16368 else
16369 add_loc_descr (&mem_loc_result, exp_result);
16370 }
16371
16372 break;
16373 }
16374
16375 default:
16376 if (flag_checking)
16377 {
16378 print_rtl (stderr, rtl);
16379 gcc_unreachable ();
16380 }
16381 break;
16382 }
16383
16384 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16385 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16386
16387 return mem_loc_result;
16388 }
16389
16390 /* Return a descriptor that describes the concatenation of two locations.
16391 This is typically a complex variable. */
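/* For example, for a complex variable this typically produces
   <location of the real part> DW_OP_piece <size0>
   <location of the imaginary part> DW_OP_piece <size1>.  */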
16392
16393 static dw_loc_descr_ref
16394 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16395 {
16396 /* At present we only track constant-sized pieces. */
16397 unsigned int size0, size1;
16398 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16399 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16400 return 0;
16401
16402 dw_loc_descr_ref cc_loc_result = NULL;
16403 dw_loc_descr_ref x0_ref
16404 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16405 dw_loc_descr_ref x1_ref
16406 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16407
16408 if (x0_ref == 0 || x1_ref == 0)
16409 return 0;
16410
16411 cc_loc_result = x0_ref;
16412 add_loc_descr_op_piece (&cc_loc_result, size0);
16413
16414 add_loc_descr (&cc_loc_result, x1_ref);
16415 add_loc_descr_op_piece (&cc_loc_result, size1);
16416
16417 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16418 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16419
16420 return cc_loc_result;
16421 }
16422
16423 /* Return a descriptor that describes the concatenation of N
16424 locations. */
16425
16426 static dw_loc_descr_ref
16427 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16428 {
16429 unsigned int i;
16430 dw_loc_descr_ref cc_loc_result = NULL;
16431 unsigned int n = XVECLEN (concatn, 0);
16432 unsigned int size;
16433
16434 for (i = 0; i < n; ++i)
16435 {
16436 dw_loc_descr_ref ref;
16437 rtx x = XVECEXP (concatn, 0, i);
16438
16439 /* At present we only track constant-sized pieces. */
16440 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16441 return NULL;
16442
16443 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16444 if (ref == NULL)
16445 return NULL;
16446
16447 add_loc_descr (&cc_loc_result, ref);
16448 add_loc_descr_op_piece (&cc_loc_result, size);
16449 }
16450
16451 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16452 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16453
16454 return cc_loc_result;
16455 }
16456
16457 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16458 for DEBUG_IMPLICIT_PTR RTL. */
16459
16460 static dw_loc_descr_ref
16461 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16462 {
16463 dw_loc_descr_ref ret;
16464 dw_die_ref ref;
16465
16466 if (dwarf_strict && dwarf_version < 5)
16467 return NULL;
16468 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16469 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16470 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16471 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16472 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16473 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16474 if (ref)
16475 {
16476 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16477 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16478 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16479 }
16480 else
16481 {
16482 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16483 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16484 }
16485 return ret;
16486 }
16487
16488 /* Output a proper Dwarf location descriptor for a variable or parameter
16489 which is either allocated in a register or in a memory location. For a
16490 register, we just generate an OP_REG and the register number. For a
16491 memory location we provide a Dwarf postfix expression describing how to
16492 generate the (dynamic) address of the object onto the address stack.
16493
16494 MODE is mode of the decl if this loc_descriptor is going to be used in
16495 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16496 allowed, VOIDmode otherwise.
16497
16498 If we don't know how to describe it, return 0. */
16499
16500 static dw_loc_descr_ref
16501 loc_descriptor (rtx rtl, machine_mode mode,
16502 enum var_init_status initialized)
16503 {
16504 dw_loc_descr_ref loc_result = NULL;
16505 scalar_int_mode int_mode;
16506
16507 switch (GET_CODE (rtl))
16508 {
16509 case SUBREG:
16510 /* The case of a subreg may arise when we have a local (register)
16511 variable or a formal (register) parameter which doesn't quite fill
16512 up an entire register. For now, just assume that it is
16513 legitimate to make the Dwarf info refer to the whole register which
16514 contains the given subreg. */
16515 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16516 loc_result = loc_descriptor (SUBREG_REG (rtl),
16517 GET_MODE (SUBREG_REG (rtl)), initialized);
16518 else
16519 goto do_default;
16520 break;
16521
16522 case REG:
16523 loc_result = reg_loc_descriptor (rtl, initialized);
16524 break;
16525
16526 case MEM:
16527 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16528 GET_MODE (rtl), initialized);
16529 if (loc_result == NULL)
16530 loc_result = tls_mem_loc_descriptor (rtl);
16531 if (loc_result == NULL)
16532 {
16533 rtx new_rtl = avoid_constant_pool_reference (rtl);
16534 if (new_rtl != rtl)
16535 loc_result = loc_descriptor (new_rtl, mode, initialized);
16536 }
16537 break;
16538
16539 case CONCAT:
16540 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16541 initialized);
16542 break;
16543
16544 case CONCATN:
16545 loc_result = concatn_loc_descriptor (rtl, initialized);
16546 break;
16547
16548 case VAR_LOCATION:
16549 /* Single part. */
16550 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16551 {
16552 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16553 if (GET_CODE (loc) == EXPR_LIST)
16554 loc = XEXP (loc, 0);
16555 loc_result = loc_descriptor (loc, mode, initialized);
16556 break;
16557 }
16558
16559 rtl = XEXP (rtl, 1);
16560 /* FALLTHRU */
16561
16562 case PARALLEL:
16563 {
16564 rtvec par_elems = XVEC (rtl, 0);
16565 int num_elem = GET_NUM_ELEM (par_elems);
16566 machine_mode mode;
16567 int i, size;
16568
16569 /* Create the first one, so we have something to add to. */
16570 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16571 VOIDmode, initialized);
16572 if (loc_result == NULL)
16573 return NULL;
16574 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16575 /* At present we only track constant-sized pieces. */
16576 if (!GET_MODE_SIZE (mode).is_constant (&size))
16577 return NULL;
16578 add_loc_descr_op_piece (&loc_result, size);
16579 for (i = 1; i < num_elem; i++)
16580 {
16581 dw_loc_descr_ref temp;
16582
16583 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16584 VOIDmode, initialized);
16585 if (temp == NULL)
16586 return NULL;
16587 add_loc_descr (&loc_result, temp);
16588 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16589 /* At present we only track constant-sized pieces. */
16590 if (!GET_MODE_SIZE (mode).is_constant (&size))
16591 return NULL;
16592 add_loc_descr_op_piece (&loc_result, size);
16593 }
16594 }
16595 break;
16596
16597 case CONST_INT:
16598 if (mode != VOIDmode && mode != BLKmode)
16599 {
16600 int_mode = as_a <scalar_int_mode> (mode);
16601 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16602 INTVAL (rtl));
16603 }
16604 break;
16605
16606 case CONST_DOUBLE:
16607 if (mode == VOIDmode)
16608 mode = GET_MODE (rtl);
16609
16610 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16611 {
16612 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16613
16614 /* Note that a CONST_DOUBLE rtx could represent either an integer
16615 or a floating-point constant. A CONST_DOUBLE is used whenever
16616 the constant requires more than one word in order to be
16617 adequately represented. We output CONST_DOUBLEs as blocks. */
16618 scalar_mode smode = as_a <scalar_mode> (mode);
16619 loc_result = new_loc_descr (DW_OP_implicit_value,
16620 GET_MODE_SIZE (smode), 0);
16621 #if TARGET_SUPPORTS_WIDE_INT == 0
16622 if (!SCALAR_FLOAT_MODE_P (smode))
16623 {
16624 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16625 loc_result->dw_loc_oprnd2.v.val_double
16626 = rtx_to_double_int (rtl);
16627 }
16628 else
16629 #endif
16630 {
16631 unsigned int length = GET_MODE_SIZE (smode);
16632 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16633
16634 insert_float (rtl, array);
16635 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16636 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16637 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16638 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16639 }
16640 }
16641 break;
16642
16643 case CONST_WIDE_INT:
16644 if (mode == VOIDmode)
16645 mode = GET_MODE (rtl);
16646
16647 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16648 {
16649 int_mode = as_a <scalar_int_mode> (mode);
16650 loc_result = new_loc_descr (DW_OP_implicit_value,
16651 GET_MODE_SIZE (int_mode), 0);
16652 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16653 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16654 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16655 }
16656 break;
16657
16658 case CONST_VECTOR:
16659 if (mode == VOIDmode)
16660 mode = GET_MODE (rtl);
16661
16662 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16663 {
16664 unsigned int length;
16665 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16666 return NULL;
16667
16668 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16669 unsigned char *array
16670 = ggc_vec_alloc<unsigned char> (length * elt_size);
16671 unsigned int i;
16672 unsigned char *p;
16673 machine_mode imode = GET_MODE_INNER (mode);
16674
16675 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16676 switch (GET_MODE_CLASS (mode))
16677 {
16678 case MODE_VECTOR_INT:
16679 for (i = 0, p = array; i < length; i++, p += elt_size)
16680 {
16681 rtx elt = CONST_VECTOR_ELT (rtl, i);
16682 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16683 }
16684 break;
16685
16686 case MODE_VECTOR_FLOAT:
16687 for (i = 0, p = array; i < length; i++, p += elt_size)
16688 {
16689 rtx elt = CONST_VECTOR_ELT (rtl, i);
16690 insert_float (elt, p);
16691 }
16692 break;
16693
16694 default:
16695 gcc_unreachable ();
16696 }
16697
16698 loc_result = new_loc_descr (DW_OP_implicit_value,
16699 length * elt_size, 0);
16700 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16701 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16702 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16703 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16704 }
16705 break;
16706
16707 case CONST:
16708 if (mode == VOIDmode
16709 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16710 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16711 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16712 {
16713 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16714 break;
16715 }
16716 /* FALLTHROUGH */
16717 case SYMBOL_REF:
16718 if (!const_ok_for_output (rtl))
16719 break;
16720 /* FALLTHROUGH */
16721 case LABEL_REF:
16722 if (is_a <scalar_int_mode> (mode, &int_mode)
16723 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16724 && (dwarf_version >= 4 || !dwarf_strict))
16725 {
16726 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16727 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16728 vec_safe_push (used_rtx_array, rtl);
16729 }
16730 break;
16731
16732 case DEBUG_IMPLICIT_PTR:
16733 loc_result = implicit_ptr_descriptor (rtl, 0);
16734 break;
16735
16736 case PLUS:
16737 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16738 && CONST_INT_P (XEXP (rtl, 1)))
16739 {
16740 loc_result
16741 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16742 break;
16743 }
16744 /* FALLTHRU */
16745 do_default:
16746 default:
16747 if ((is_a <scalar_int_mode> (mode, &int_mode)
16748 && GET_MODE (rtl) == int_mode
16749 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16750 && dwarf_version >= 4)
16751 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16752 {
16753 /* Value expression. */
16754 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16755 if (loc_result)
16756 add_loc_descr (&loc_result,
16757 new_loc_descr (DW_OP_stack_value, 0, 0));
16758 }
16759 break;
16760 }
16761
16762 return loc_result;
16763 }
16764
16765 /* We need to figure out what section we should use as the base for the
16766 address ranges where a given location is valid.
16767 1. If this particular DECL has a section associated with it, use that.
16768 2. If this function has a section associated with it, use that.
16769 3. Otherwise, use the text section.
16770 XXX: If you split a variable across multiple sections, we won't notice. */
16771
16772 static const char *
16773 secname_for_decl (const_tree decl)
16774 {
16775 const char *secname;
16776
16777 if (VAR_OR_FUNCTION_DECL_P (decl)
16778 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16779 && DECL_SECTION_NAME (decl))
16780 secname = DECL_SECTION_NAME (decl);
16781 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16782 secname = DECL_SECTION_NAME (current_function_decl);
16783 else if (cfun && in_cold_section_p)
16784 secname = crtl->subsections.cold_section_label;
16785 else
16786 secname = text_section_label;
16787
16788 return secname;
16789 }
16790
16791 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16792
16793 static bool
16794 decl_by_reference_p (tree decl)
16795 {
16796 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16797 || VAR_P (decl))
16798 && DECL_BY_REFERENCE (decl));
16799 }
16800
16801 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16802 for VARLOC. */
16803
16804 static dw_loc_descr_ref
16805 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16806 enum var_init_status initialized)
16807 {
16808 int have_address = 0;
16809 dw_loc_descr_ref descr;
16810 machine_mode mode;
16811
16812 if (want_address != 2)
16813 {
16814 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16815 /* Single part. */
16816 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16817 {
16818 varloc = PAT_VAR_LOCATION_LOC (varloc);
16819 if (GET_CODE (varloc) == EXPR_LIST)
16820 varloc = XEXP (varloc, 0);
16821 mode = GET_MODE (varloc);
16822 if (MEM_P (varloc))
16823 {
16824 rtx addr = XEXP (varloc, 0);
16825 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16826 mode, initialized);
16827 if (descr)
16828 have_address = 1;
16829 else
16830 {
16831 rtx x = avoid_constant_pool_reference (varloc);
16832 if (x != varloc)
16833 descr = mem_loc_descriptor (x, mode, VOIDmode,
16834 initialized);
16835 }
16836 }
16837 else
16838 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16839 }
16840 else
16841 return 0;
16842 }
16843 else
16844 {
16845 if (GET_CODE (varloc) == VAR_LOCATION)
16846 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16847 else
16848 mode = DECL_MODE (loc);
16849 descr = loc_descriptor (varloc, mode, initialized);
16850 have_address = 1;
16851 }
16852
16853 if (!descr)
16854 return 0;
16855
16856 if (want_address == 2 && !have_address
16857 && (dwarf_version >= 4 || !dwarf_strict))
16858 {
16859 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16860 {
16861 expansion_failed (loc, NULL_RTX,
16862 "DWARF address size mismatch");
16863 return 0;
16864 }
16865 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16866 have_address = 1;
16867 }
16868 /* Show if we can't fill the request for an address. */
16869 if (want_address && !have_address)
16870 {
16871 expansion_failed (loc, NULL_RTX,
16872 "Want address and only have value");
16873 return 0;
16874 }
16875
16876 /* If we've got an address and don't want one, dereference. */
16877 if (!want_address && have_address)
16878 {
16879 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16880 enum dwarf_location_atom op;
16881
16882 if (size > DWARF2_ADDR_SIZE || size == -1)
16883 {
16884 expansion_failed (loc, NULL_RTX,
16885 "DWARF address size mismatch");
16886 return 0;
16887 }
16888 else if (size == DWARF2_ADDR_SIZE)
16889 op = DW_OP_deref;
16890 else
16891 op = DW_OP_deref_size;
16892
16893 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16894 }
16895
16896 return descr;
16897 }
16898
16899 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16900 if it is not possible. */
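/* For example, a byte-aligned 32-bit piece at bit offset 0 becomes
   DW_OP_piece 4, whereas a 3-bit piece needs DW_OP_bit_piece 3, <offset>,
   which is only emitted for DWARF 3 and later or when strict DWARF is not
   requested.  */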
16901
16902 static dw_loc_descr_ref
16903 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16904 {
16905 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16906 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16907 else if (dwarf_version >= 3 || !dwarf_strict)
16908 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16909 else
16910 return NULL;
16911 }
16912
16913 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16914 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
16915
16916 static dw_loc_descr_ref
16917 dw_sra_loc_expr (tree decl, rtx loc)
16918 {
16919 rtx p;
16920 unsigned HOST_WIDE_INT padsize = 0;
16921 dw_loc_descr_ref descr, *descr_tail;
16922 unsigned HOST_WIDE_INT decl_size;
16923 rtx varloc;
16924 enum var_init_status initialized;
16925
16926 if (DECL_SIZE (decl) == NULL
16927 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16928 return NULL;
16929
16930 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16931 descr = NULL;
16932 descr_tail = &descr;
16933
16934 for (p = loc; p; p = XEXP (p, 1))
16935 {
16936 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16937 rtx loc_note = *decl_piece_varloc_ptr (p);
16938 dw_loc_descr_ref cur_descr;
16939 dw_loc_descr_ref *tail, last = NULL;
16940 unsigned HOST_WIDE_INT opsize = 0;
16941
16942 if (loc_note == NULL_RTX
16943 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16944 {
16945 padsize += bitsize;
16946 continue;
16947 }
16948 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16949 varloc = NOTE_VAR_LOCATION (loc_note);
16950 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16951 if (cur_descr == NULL)
16952 {
16953 padsize += bitsize;
16954 continue;
16955 }
16956
16957 /* Check that cur_descr either doesn't use
16958 DW_OP_*piece operations, or their sum is equal
16959 to bitsize. Otherwise we can't embed it. */
16960 for (tail = &cur_descr; *tail != NULL;
16961 tail = &(*tail)->dw_loc_next)
16962 if ((*tail)->dw_loc_opc == DW_OP_piece)
16963 {
16964 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16965 * BITS_PER_UNIT;
16966 last = *tail;
16967 }
16968 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16969 {
16970 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16971 last = *tail;
16972 }
16973
16974 if (last != NULL && opsize != bitsize)
16975 {
16976 padsize += bitsize;
16977 /* Discard the current piece of the descriptor and release any
16978 addr_table entries it uses. */
16979 remove_loc_list_addr_table_entries (cur_descr);
16980 continue;
16981 }
16982
16983 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
16984 expression, which means that those bits are optimized out. */
16985 if (padsize)
16986 {
16987 if (padsize > decl_size)
16988 {
16989 remove_loc_list_addr_table_entries (cur_descr);
16990 goto discard_descr;
16991 }
16992 decl_size -= padsize;
16993 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16994 if (*descr_tail == NULL)
16995 {
16996 remove_loc_list_addr_table_entries (cur_descr);
16997 goto discard_descr;
16998 }
16999 descr_tail = &(*descr_tail)->dw_loc_next;
17000 padsize = 0;
17001 }
17002 *descr_tail = cur_descr;
17003 descr_tail = tail;
17004 if (bitsize > decl_size)
17005 goto discard_descr;
17006 decl_size -= bitsize;
17007 if (last == NULL)
17008 {
17009 HOST_WIDE_INT offset = 0;
17010 if (GET_CODE (varloc) == VAR_LOCATION
17011 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17012 {
17013 varloc = PAT_VAR_LOCATION_LOC (varloc);
17014 if (GET_CODE (varloc) == EXPR_LIST)
17015 varloc = XEXP (varloc, 0);
17016 }
17017 do
17018 {
17019 if (GET_CODE (varloc) == CONST
17020 || GET_CODE (varloc) == SIGN_EXTEND
17021 || GET_CODE (varloc) == ZERO_EXTEND)
17022 varloc = XEXP (varloc, 0);
17023 else if (GET_CODE (varloc) == SUBREG)
17024 varloc = SUBREG_REG (varloc);
17025 else
17026 break;
17027 }
17028 while (1);
17029 /* The DW_OP_bit_piece offset should be zero for register
17030 or implicit location descriptions and empty location
17031 descriptions, but for memory addresses it needs a big-endian
17032 adjustment. */
17033 if (MEM_P (varloc))
17034 {
17035 unsigned HOST_WIDE_INT memsize;
17036 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17037 goto discard_descr;
17038 memsize *= BITS_PER_UNIT;
17039 if (memsize != bitsize)
17040 {
17041 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17042 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17043 goto discard_descr;
17044 if (memsize < bitsize)
17045 goto discard_descr;
17046 if (BITS_BIG_ENDIAN)
17047 offset = memsize - bitsize;
17048 }
17049 }
17050
17051 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17052 if (*descr_tail == NULL)
17053 goto discard_descr;
17054 descr_tail = &(*descr_tail)->dw_loc_next;
17055 }
17056 }
17057
17058 /* If there were any non-empty expressions, add padding till the end of
17059 the decl. */
17060 if (descr != NULL && decl_size != 0)
17061 {
17062 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17063 if (*descr_tail == NULL)
17064 goto discard_descr;
17065 }
17066 return descr;
17067
17068 discard_descr:
17069 /* Discard the descriptor and release any addr_table entries it uses. */
17070 remove_loc_list_addr_table_entries (descr);
17071 return NULL;
17072 }
17073
17074 /* Return the dwarf representation of the location list LOC_LIST of
17075 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
17076 function. */
17077
17078 static dw_loc_list_ref
17079 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17080 {
17081 const char *endname, *secname;
17082 var_loc_view endview;
17083 rtx varloc;
17084 enum var_init_status initialized;
17085 struct var_loc_node *node;
17086 dw_loc_descr_ref descr;
17087 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17088 dw_loc_list_ref list = NULL;
17089 dw_loc_list_ref *listp = &list;
17090
17091 /* Now that we know what section we are using for a base,
17092 actually construct the list of locations.
17093 The first location information is what is passed to the
17094 function that creates the location list, and the remaining
17095 locations just get added on to that list.
17096 Note that we only know the start address for a location
17097 (i.e. location changes), so to build the range, we use
17098 the range [current location start, next location start].
17099 This means we have to special case the last node, and generate
17100 a range of [last location start, end of function label]. */
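/* For instance, if a variable's location changes at labels L1, L2 and L3
   in a function ending at LEND, the resulting list covers the ranges
   [L1, L2), [L2, L3) and [L3, LEND).  */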
17101
17102 if (cfun && crtl->has_bb_partition)
17103 {
17104 bool save_in_cold_section_p = in_cold_section_p;
17105 in_cold_section_p = first_function_block_is_cold;
17106 if (loc_list->last_before_switch == NULL)
17107 in_cold_section_p = !in_cold_section_p;
17108 secname = secname_for_decl (decl);
17109 in_cold_section_p = save_in_cold_section_p;
17110 }
17111 else
17112 secname = secname_for_decl (decl);
17113
17114 for (node = loc_list->first; node; node = node->next)
17115 {
17116 bool range_across_switch = false;
17117 if (GET_CODE (node->loc) == EXPR_LIST
17118 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17119 {
17120 if (GET_CODE (node->loc) == EXPR_LIST)
17121 {
17122 descr = NULL;
17123 /* This requires DW_OP_{,bit_}piece, which is not usable
17124 inside DWARF expressions. */
17125 if (want_address == 2)
17126 descr = dw_sra_loc_expr (decl, node->loc);
17127 }
17128 else
17129 {
17130 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17131 varloc = NOTE_VAR_LOCATION (node->loc);
17132 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17133 }
17134 if (descr)
17135 {
17136 /* If a section switch happens in between node->label
17137 and node->next->label (or the end of the function) and
17138 we can't emit it as a single entry list,
17139 emit two ranges, the first one ending at the end
17140 of the first partition and the second one starting at the
17141 beginning of the second partition. */
17142 if (node == loc_list->last_before_switch
17143 && (node != loc_list->first || loc_list->first->next)
17144 && current_function_decl)
17145 {
17146 endname = cfun->fde->dw_fde_end;
17147 endview = 0;
17148 range_across_switch = true;
17149 }
17150 /* The variable has a location between NODE->LABEL and
17151 NODE->NEXT->LABEL. */
17152 else if (node->next)
17153 endname = node->next->label, endview = node->next->view;
17154 /* If the variable has a location at the last label
17155 it keeps its location until the end of function. */
17156 else if (!current_function_decl)
17157 endname = text_end_label, endview = 0;
17158 else
17159 {
17160 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17161 current_function_funcdef_no);
17162 endname = ggc_strdup (label_id);
17163 endview = 0;
17164 }
17165
17166 *listp = new_loc_list (descr, node->label, node->view,
17167 endname, endview, secname);
17168 if (TREE_CODE (decl) == PARM_DECL
17169 && node == loc_list->first
17170 && NOTE_P (node->loc)
17171 && strcmp (node->label, endname) == 0)
17172 (*listp)->force = true;
17173 listp = &(*listp)->dw_loc_next;
17174 }
17175 }
17176
17177 if (cfun
17178 && crtl->has_bb_partition
17179 && node == loc_list->last_before_switch)
17180 {
17181 bool save_in_cold_section_p = in_cold_section_p;
17182 in_cold_section_p = !first_function_block_is_cold;
17183 secname = secname_for_decl (decl);
17184 in_cold_section_p = save_in_cold_section_p;
17185 }
17186
17187 if (range_across_switch)
17188 {
17189 if (GET_CODE (node->loc) == EXPR_LIST)
17190 descr = dw_sra_loc_expr (decl, node->loc);
17191 else
17192 {
17193 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17194 varloc = NOTE_VAR_LOCATION (node->loc);
17195 descr = dw_loc_list_1 (decl, varloc, want_address,
17196 initialized);
17197 }
17198 gcc_assert (descr);
17199 /* The variable has a location between NODE->LABEL and
17200 NODE->NEXT->LABEL. */
17201 if (node->next)
17202 endname = node->next->label, endview = node->next->view;
17203 else
17204 endname = cfun->fde->dw_fde_second_end, endview = 0;
17205 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17206 endname, endview, secname);
17207 listp = &(*listp)->dw_loc_next;
17208 }
17209 }
17210
17211 /* Try to avoid the overhead of a location list by emitting a location
17212 expression instead, but only if we didn't have more than one
17213 location entry in the first place. If some entries were not
17214 representable, we don't want to pretend that a single entry that was
17215 representable applies to the entire scope in which the variable is
17216 available. */
17217 if (list && loc_list->first->next)
17218 gen_llsym (list);
17219 else
17220 maybe_gen_llsym (list);
17221
17222 return list;
17223 }
17224
17225 /* Return true if the loc_list has only a single element and thus can be
17226 represented as a location description. */
17227
17228 static bool
17229 single_element_loc_list_p (dw_loc_list_ref list)
17230 {
17231 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17232 return !list->ll_symbol;
17233 }
17234
17235 /* Duplicate a single element of location list. */
17236
17237 static inline dw_loc_descr_ref
17238 copy_loc_descr (dw_loc_descr_ref ref)
17239 {
17240 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17241 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17242 return copy;
17243 }
17244
17245 /* To each location in list LIST append loc descr REF. */
17246
17247 static void
17248 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17249 {
17250 dw_loc_descr_ref copy;
17251 add_loc_descr (&list->expr, ref);
17252 list = list->dw_loc_next;
17253 while (list)
17254 {
17255 copy = copy_loc_descr (ref);
17256 add_loc_descr (&list->expr, copy);
17257 while (copy->dw_loc_next)
17258 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17259 list = list->dw_loc_next;
17260 }
17261 }
17262
17263 /* To each location in list LIST prepend loc descr REF. */
17264
17265 static void
17266 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17267 {
17268 dw_loc_descr_ref copy;
17269 dw_loc_descr_ref ref_end = list->expr;
17270 add_loc_descr (&ref, list->expr);
17271 list->expr = ref;
17272 list = list->dw_loc_next;
17273 while (list)
17274 {
17275 dw_loc_descr_ref end = list->expr;
17276 list->expr = copy = copy_loc_descr (ref);
17277 while (copy->dw_loc_next != ref_end)
17278 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17279 copy->dw_loc_next = end;
17280 list = list->dw_loc_next;
17281 }
17282 }
17283
17284 /* Given two lists RET and LIST,
17285 produce a location list that is the result of adding the expression in LIST
17286 to the expression in RET at each position in the program.
17287 Might be destructive on both RET and LIST.
17288
17289 TODO: We handle only the simple cases of RET or LIST having at most one
17290 element. The general case would involve sorting the lists in program order
17291 and merging them, which will need some additional work.
17292 Adding that will improve the quality of debug info, especially for SRA-ed
17293 structures. */
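/* For instance, when LIST has a single element, its expression is appended
   to the expression of every element of *RET; symmetrically, when *RET has
   a single element, its expression is prepended to every element of LIST
   and *RET is replaced by LIST.  */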
17294
17295 static void
17296 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17297 {
17298 if (!list)
17299 return;
17300 if (!*ret)
17301 {
17302 *ret = list;
17303 return;
17304 }
17305 if (!list->dw_loc_next)
17306 {
17307 add_loc_descr_to_each (*ret, list->expr);
17308 return;
17309 }
17310 if (!(*ret)->dw_loc_next)
17311 {
17312 prepend_loc_descr_to_each (list, (*ret)->expr);
17313 *ret = list;
17314 return;
17315 }
17316 expansion_failed (NULL_TREE, NULL_RTX,
17317 "Don't know how to merge two non-trivial"
17318 " location lists.\n");
17319 *ret = NULL;
17320 return;
17321 }
17322
17323 /* LOC is a constant expression. Try our luck: look it up in the constant
17324 pool and return the loc_descr of its address. */
17325
17326 static dw_loc_descr_ref
17327 cst_pool_loc_descr (tree loc)
17328 {
17329 /* Get an RTL for this, if something has been emitted. */
17330 rtx rtl = lookup_constant_def (loc);
17331
17332 if (!rtl || !MEM_P (rtl))
17333 {
17334 gcc_assert (!rtl);
17335 return 0;
17336 }
17337 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17338
17339 /* TODO: We might get more coverage if we were actually delaying expansion
17340 of all expressions until the end of compilation, when constant pools are
17341 fully populated. */
17342 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17343 {
17344 expansion_failed (loc, NULL_RTX,
17345 "CST value in constant pool but not marked.");
17346 return 0;
17347 }
17348 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17349 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17350 }
17351
17352 /* Return a dw_loc_list representing the address of the addr_expr LOC
17353 by looking for an inner INDIRECT_REF expression and turning
17354 it into simple arithmetic.
17355
17356 See loc_list_from_tree for the meaning of CONTEXT. */
17357
17358 static dw_loc_list_ref
17359 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17360 loc_descr_context *context)
17361 {
17362 tree obj, offset;
17363 poly_int64 bitsize, bitpos, bytepos;
17364 machine_mode mode;
17365 int unsignedp, reversep, volatilep = 0;
17366 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17367
17368 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17369 &bitsize, &bitpos, &offset, &mode,
17370 &unsignedp, &reversep, &volatilep);
17371 STRIP_NOPS (obj);
17372 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17373 {
17374 expansion_failed (loc, NULL_RTX, "bitfield access");
17375 return 0;
17376 }
17377 if (!INDIRECT_REF_P (obj))
17378 {
17379 expansion_failed (obj,
17380 NULL_RTX, "no indirect ref in inner reference");
17381 return 0;
17382 }
17383 if (!offset && known_eq (bitpos, 0))
17384 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17385 context);
17386 else if (toplev
17387 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17388 && (dwarf_version >= 4 || !dwarf_strict))
17389 {
17390 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17391 if (!list_ret)
17392 return 0;
17393 if (offset)
17394 {
17395 /* Variable offset. */
17396 list_ret1 = loc_list_from_tree (offset, 0, context);
17397 if (list_ret1 == 0)
17398 return 0;
17399 add_loc_list (&list_ret, list_ret1);
17400 if (!list_ret)
17401 return 0;
17402 add_loc_descr_to_each (list_ret,
17403 new_loc_descr (DW_OP_plus, 0, 0));
17404 }
17405 HOST_WIDE_INT value;
17406 if (bytepos.is_constant (&value) && value > 0)
17407 add_loc_descr_to_each (list_ret,
17408 new_loc_descr (DW_OP_plus_uconst, value, 0));
17409 else if (maybe_ne (bytepos, 0))
17410 loc_list_plus_const (list_ret, bytepos);
17411 add_loc_descr_to_each (list_ret,
17412 new_loc_descr (DW_OP_stack_value, 0, 0));
17413 }
17414 return list_ret;
17415 }
17416
17417 /* Set LOC to the next operation that is not a DW_OP_nop operation. In the
17418 case where all operations from LOC are nops, move to the last one. Insert
17419 in NOPS all operations that are skipped. */
17420
17421 static void
17422 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17423 hash_set<dw_loc_descr_ref> &nops)
17424 {
17425 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17426 {
17427 nops.add (loc);
17428 loc = loc->dw_loc_next;
17429 }
17430 }
17431
17432 /* Helper for loc_descr_without_nops: free the location description operation
17433 LOC. */
17434
17435 bool
17436 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17437 {
17438 ggc_free (loc);
17439 return true;
17440 }
17441
17442 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17443 finishes LOC. */
17444
17445 static void
17446 loc_descr_without_nops (dw_loc_descr_ref &loc)
17447 {
17448 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17449 return;
17450
17451 /* Set of all DW_OP_nop operations we remove. */
17452 hash_set<dw_loc_descr_ref> nops;
17453
17454 /* First, strip all prefix NOP operations in order to keep the head of the
17455 operations list. */
17456 loc_descr_to_next_no_nop (loc, nops);
17457
17458 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17459 {
17460 /* For control flow operations: strip "prefix" nops in destination
17461 labels. */
17462 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17463 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17464 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17465 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17466
17467 /* Do the same for the operations that follow, then move to the next
17468 iteration. */
17469 if (cur->dw_loc_next != NULL)
17470 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17471 cur = cur->dw_loc_next;
17472 }
17473
17474 nops.traverse<void *, free_loc_descr> (NULL);
17475 }
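
/* Illustrative sketch (not from the sources): the sequence

     DW_OP_nop; DW_OP_lit1; DW_OP_nop; DW_OP_plus; DW_OP_nop

   is rewritten into

     DW_OP_lit1; DW_OP_plus; DW_OP_nop

   where the trailing DW_OP_nop survives because it terminates the list and
   may be the target of a DW_OP_skip or DW_OP_bra operation.  */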
17476
17477
17478 struct dwarf_procedure_info;
17479
17480 /* Helper structure for location descriptions generation. */
17481 struct loc_descr_context
17482 {
17483 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17484 NULL_TREE if DW_OP_push_object_address is invalid for this location
17485 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17486 tree context_type;
17487 /* The ..._DECL node that should be translated as a
17488 DW_OP_push_object_address operation. */
17489 tree base_decl;
17490 /* Information about the DWARF procedure we are currently generating. NULL if
17491 we are not generating a DWARF procedure. */
17492 struct dwarf_procedure_info *dpi;
17493 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17494 by consumer. Used for DW_TAG_generic_subrange attributes. */
17495 bool placeholder_arg;
17496 /* True if PLACEHOLDER_EXPR has been seen. */
17497 bool placeholder_seen;
17498 };
17499
17500 /* DWARF procedures generation
17501
17502 DWARF expressions (a.k.a. location descriptions) are used to encode variable
17503 quantities such as sizes or offsets. Such computations can have redundant parts
17504 that can be factorized in order to reduce the size of the output debug
17505 information. This is the whole point of DWARF procedures.
17506
17507 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17508 already factorized into functions ("size functions") in order to handle very
17509 big and complex types. Such functions are quite simple: they have integral
17510 arguments, they return an integral result and their body contains only a
17511 return statement with arithmetic expressions. This is the only kind of
17512 function we are interested in translating into DWARF procedures here.
17513
17514 DWARF expressions and DWARF procedures are executed using a stack, so we have
17515 to define some calling convention for them to interact. Let's say that:
17516
17517 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17518 all arguments in reverse order (right-to-left) so that when the DWARF
17519 procedure execution starts, the first argument is the top of the stack.
17520
17521 - Then, when returning, the DWARF procedure must have consumed all arguments
17522 on the stack, must have pushed the result and touched nothing else.
17523
17524 - Each argument and the result have an integral type and can be held in a
17525 single stack slot.
17526
17527 - We call "frame offset" the number of stack slots that are "under DWARF
17528 procedure control": it includes the argument slots, the temporaries and
17529 the result slot. Thus, it is equal to the number of arguments when the
17530 procedure execution starts and must be equal to one (the result) when it
17531 returns. */
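
/* Illustrative example (not from the sources), assuming a hypothetical size
   function SZ (A, B) that returns A * B and a call SZ (2, 3):

   - The caller pushes the arguments right-to-left: first 3, then 2.  When
     the procedure starts, the stack is (top first) 2 3 and the frame offset
     is 2 (the two argument slots).

   - The body references A and B through frame-relative DW_OP_pick
     operations, relocated by resolve_args_picking below, and multiplies
     them: after DW_OP_dup; DW_OP_pick 2; DW_OP_mul the stack is 6 2 3 and
     the frame offset is 3.

   - The epilogue then drops the consumed arguments with DW_OP_swap;
     DW_OP_drop; DW_OP_swap; DW_OP_drop, leaving only the result 6 on the
     stack and a frame offset of 1.  */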
17532
17533 /* Helper structure used when generating operations for a DWARF procedure. */
17534 struct dwarf_procedure_info
17535 {
17536 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17537 currently translated. */
17538 tree fndecl;
17539 /* The number of arguments FNDECL takes. */
17540 unsigned args_count;
17541 };
17542
17543 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17544 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17545 equate it to this DIE. */
17546
17547 static dw_die_ref
17548 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17549 dw_die_ref parent_die)
17550 {
17551 dw_die_ref dwarf_proc_die;
17552
17553 if ((dwarf_version < 3 && dwarf_strict)
17554 || location == NULL)
17555 return NULL;
17556
17557 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17558 if (fndecl)
17559 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17560 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17561 return dwarf_proc_die;
17562 }
17563
17564 /* Return whether TYPE is a supported type as a DWARF procedure argument
17565 type or return type (we handle only scalar types and pointer types that
17566 aren't wider than the DWARF expression evaluation stack). */
17567
17568 static bool
17569 is_handled_procedure_type (tree type)
17570 {
17571 return ((INTEGRAL_TYPE_P (type)
17572 || TREE_CODE (type) == OFFSET_TYPE
17573 || TREE_CODE (type) == POINTER_TYPE)
17574 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17575 }
17576
17577 /* Helper for resolve_args_picking: do the same but stop when coming across
17578 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17579 offset *before* evaluating the corresponding operation. */
17580
17581 static bool
17582 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17583 struct dwarf_procedure_info *dpi,
17584 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17585 {
17586 /* The "frame_offset" identifier is already used to name a macro... */
17587 unsigned frame_offset_ = initial_frame_offset;
17588 dw_loc_descr_ref l;
17589
17590 for (l = loc; l != NULL;)
17591 {
17592 bool existed;
17593 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17594
17595 /* If we already met this node, there is nothing to compute anymore. */
17596 if (existed)
17597 {
17598 /* Make sure that the stack size is consistent wherever the execution
17599 flow comes from. */
17600 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17601 break;
17602 }
17603 l_frame_offset = frame_offset_;
17604
17605 /* If needed, relocate the picking offset with respect to the frame
17606 offset. */
17607 if (l->frame_offset_rel)
17608 {
17609 unsigned HOST_WIDE_INT off;
17610 switch (l->dw_loc_opc)
17611 {
17612 case DW_OP_pick:
17613 off = l->dw_loc_oprnd1.v.val_unsigned;
17614 break;
17615 case DW_OP_dup:
17616 off = 0;
17617 break;
17618 case DW_OP_over:
17619 off = 1;
17620 break;
17621 default:
17622 gcc_unreachable ();
17623 }
17624 /* frame_offset_ is the size of the current stack frame, including
17625 incoming arguments. Besides, the arguments are pushed
17626 right-to-left. Thus, in order to access the Nth argument from
17627 this operation node, the picking has to skip temporaries *plus*
17628 one stack slot per argument (0 for the first one, 1 for the second
17629 one, etc.).
17630
17631 The targeted argument number (N) is already set as the operand,
17632 and the number of temporaries can be computed with:
17633 frame_offset_ - dpi->args_count */
17634 off += frame_offset_ - dpi->args_count;
17635
17636 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17637 if (off > 255)
17638 return false;
17639
17640 if (off == 0)
17641 {
17642 l->dw_loc_opc = DW_OP_dup;
17643 l->dw_loc_oprnd1.v.val_unsigned = 0;
17644 }
17645 else if (off == 1)
17646 {
17647 l->dw_loc_opc = DW_OP_over;
17648 l->dw_loc_oprnd1.v.val_unsigned = 0;
17649 }
17650 else
17651 {
17652 l->dw_loc_opc = DW_OP_pick;
17653 l->dw_loc_oprnd1.v.val_unsigned = off;
17654 }
17655 }
17656
17657 /* Update frame_offset according to the effect the current operation has
17658 on the stack. */
17659 switch (l->dw_loc_opc)
17660 {
17661 case DW_OP_deref:
17662 case DW_OP_swap:
17663 case DW_OP_rot:
17664 case DW_OP_abs:
17665 case DW_OP_neg:
17666 case DW_OP_not:
17667 case DW_OP_plus_uconst:
17668 case DW_OP_skip:
17669 case DW_OP_reg0:
17670 case DW_OP_reg1:
17671 case DW_OP_reg2:
17672 case DW_OP_reg3:
17673 case DW_OP_reg4:
17674 case DW_OP_reg5:
17675 case DW_OP_reg6:
17676 case DW_OP_reg7:
17677 case DW_OP_reg8:
17678 case DW_OP_reg9:
17679 case DW_OP_reg10:
17680 case DW_OP_reg11:
17681 case DW_OP_reg12:
17682 case DW_OP_reg13:
17683 case DW_OP_reg14:
17684 case DW_OP_reg15:
17685 case DW_OP_reg16:
17686 case DW_OP_reg17:
17687 case DW_OP_reg18:
17688 case DW_OP_reg19:
17689 case DW_OP_reg20:
17690 case DW_OP_reg21:
17691 case DW_OP_reg22:
17692 case DW_OP_reg23:
17693 case DW_OP_reg24:
17694 case DW_OP_reg25:
17695 case DW_OP_reg26:
17696 case DW_OP_reg27:
17697 case DW_OP_reg28:
17698 case DW_OP_reg29:
17699 case DW_OP_reg30:
17700 case DW_OP_reg31:
17701 case DW_OP_bregx:
17702 case DW_OP_piece:
17703 case DW_OP_deref_size:
17704 case DW_OP_nop:
17705 case DW_OP_bit_piece:
17706 case DW_OP_implicit_value:
17707 case DW_OP_stack_value:
17708 break;
17709
17710 case DW_OP_addr:
17711 case DW_OP_const1u:
17712 case DW_OP_const1s:
17713 case DW_OP_const2u:
17714 case DW_OP_const2s:
17715 case DW_OP_const4u:
17716 case DW_OP_const4s:
17717 case DW_OP_const8u:
17718 case DW_OP_const8s:
17719 case DW_OP_constu:
17720 case DW_OP_consts:
17721 case DW_OP_dup:
17722 case DW_OP_over:
17723 case DW_OP_pick:
17724 case DW_OP_lit0:
17725 case DW_OP_lit1:
17726 case DW_OP_lit2:
17727 case DW_OP_lit3:
17728 case DW_OP_lit4:
17729 case DW_OP_lit5:
17730 case DW_OP_lit6:
17731 case DW_OP_lit7:
17732 case DW_OP_lit8:
17733 case DW_OP_lit9:
17734 case DW_OP_lit10:
17735 case DW_OP_lit11:
17736 case DW_OP_lit12:
17737 case DW_OP_lit13:
17738 case DW_OP_lit14:
17739 case DW_OP_lit15:
17740 case DW_OP_lit16:
17741 case DW_OP_lit17:
17742 case DW_OP_lit18:
17743 case DW_OP_lit19:
17744 case DW_OP_lit20:
17745 case DW_OP_lit21:
17746 case DW_OP_lit22:
17747 case DW_OP_lit23:
17748 case DW_OP_lit24:
17749 case DW_OP_lit25:
17750 case DW_OP_lit26:
17751 case DW_OP_lit27:
17752 case DW_OP_lit28:
17753 case DW_OP_lit29:
17754 case DW_OP_lit30:
17755 case DW_OP_lit31:
17756 case DW_OP_breg0:
17757 case DW_OP_breg1:
17758 case DW_OP_breg2:
17759 case DW_OP_breg3:
17760 case DW_OP_breg4:
17761 case DW_OP_breg5:
17762 case DW_OP_breg6:
17763 case DW_OP_breg7:
17764 case DW_OP_breg8:
17765 case DW_OP_breg9:
17766 case DW_OP_breg10:
17767 case DW_OP_breg11:
17768 case DW_OP_breg12:
17769 case DW_OP_breg13:
17770 case DW_OP_breg14:
17771 case DW_OP_breg15:
17772 case DW_OP_breg16:
17773 case DW_OP_breg17:
17774 case DW_OP_breg18:
17775 case DW_OP_breg19:
17776 case DW_OP_breg20:
17777 case DW_OP_breg21:
17778 case DW_OP_breg22:
17779 case DW_OP_breg23:
17780 case DW_OP_breg24:
17781 case DW_OP_breg25:
17782 case DW_OP_breg26:
17783 case DW_OP_breg27:
17784 case DW_OP_breg28:
17785 case DW_OP_breg29:
17786 case DW_OP_breg30:
17787 case DW_OP_breg31:
17788 case DW_OP_fbreg:
17789 case DW_OP_push_object_address:
17790 case DW_OP_call_frame_cfa:
17791 case DW_OP_GNU_variable_value:
17792 ++frame_offset_;
17793 break;
17794
17795 case DW_OP_drop:
17796 case DW_OP_xderef:
17797 case DW_OP_and:
17798 case DW_OP_div:
17799 case DW_OP_minus:
17800 case DW_OP_mod:
17801 case DW_OP_mul:
17802 case DW_OP_or:
17803 case DW_OP_plus:
17804 case DW_OP_shl:
17805 case DW_OP_shr:
17806 case DW_OP_shra:
17807 case DW_OP_xor:
17808 case DW_OP_bra:
17809 case DW_OP_eq:
17810 case DW_OP_ge:
17811 case DW_OP_gt:
17812 case DW_OP_le:
17813 case DW_OP_lt:
17814 case DW_OP_ne:
17815 case DW_OP_regx:
17816 case DW_OP_xderef_size:
17817 --frame_offset_;
17818 break;
17819
17820 case DW_OP_call2:
17821 case DW_OP_call4:
17822 case DW_OP_call_ref:
17823 {
17824 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17825 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17826
17827 if (stack_usage == NULL)
17828 return false;
17829 frame_offset_ += *stack_usage;
17830 break;
17831 }
17832
17833 case DW_OP_implicit_pointer:
17834 case DW_OP_entry_value:
17835 case DW_OP_const_type:
17836 case DW_OP_regval_type:
17837 case DW_OP_deref_type:
17838 case DW_OP_convert:
17839 case DW_OP_reinterpret:
17840 case DW_OP_form_tls_address:
17841 case DW_OP_GNU_push_tls_address:
17842 case DW_OP_GNU_uninit:
17843 case DW_OP_GNU_encoded_addr:
17844 case DW_OP_GNU_implicit_pointer:
17845 case DW_OP_GNU_entry_value:
17846 case DW_OP_GNU_const_type:
17847 case DW_OP_GNU_regval_type:
17848 case DW_OP_GNU_deref_type:
17849 case DW_OP_GNU_convert:
17850 case DW_OP_GNU_reinterpret:
17851 case DW_OP_GNU_parameter_ref:
17852 /* loc_list_from_tree will probably not output these operations for
17853 size functions, so assume they will not appear here. */
17854 /* Fall through... */
17855
17856 default:
17857 gcc_unreachable ();
17858 }
17859
17860 /* Now, follow the control flow (except subroutine calls). */
17861 switch (l->dw_loc_opc)
17862 {
17863 case DW_OP_bra:
17864 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17865 frame_offsets))
17866 return false;
17867 /* Fall through. */
17868
17869 case DW_OP_skip:
17870 l = l->dw_loc_oprnd1.v.val_loc;
17871 break;
17872
17873 case DW_OP_stack_value:
17874 return true;
17875
17876 default:
17877 l = l->dw_loc_next;
17878 break;
17879 }
17880 }
17881
17882 return true;
17883 }
17884
17885 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17886 operations) in order to resolve the operand of DW_OP_pick operations that
17887 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17888 offset *before* LOC is executed. Return whether all relocations were
17889 successful. */
17890
17891 static bool
17892 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17893 struct dwarf_procedure_info *dpi)
17894 {
17895 /* Associate to all visited operations the frame offset *before* evaluating
17896 this operation. */
17897 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17898
17899 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17900 frame_offsets);
17901 }
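
/* For instance (an illustrative case, not from the sources): in a
   two-argument DWARF procedure, a reference to the second argument is first
   emitted as DW_OP_pick 1 with frame_offset_rel set.  If one temporary is
   already on the stack when that operation executes, frame_offset_ is 3 at
   that point, so the relocated offset is 1 + 3 - 2 = 2 and the operation
   becomes DW_OP_pick 2.  */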
17902
17903 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17904 Return NULL if it is not possible. */
17905
17906 static dw_die_ref
17907 function_to_dwarf_procedure (tree fndecl)
17908 {
17909 struct loc_descr_context ctx;
17910 struct dwarf_procedure_info dpi;
17911 dw_die_ref dwarf_proc_die;
17912 tree tree_body = DECL_SAVED_TREE (fndecl);
17913 dw_loc_descr_ref loc_body, epilogue;
17914
17915 tree cursor;
17916 unsigned i;
17917
17918 /* Do not generate multiple DWARF procedures for the same function
17919 declaration. */
17920 dwarf_proc_die = lookup_decl_die (fndecl);
17921 if (dwarf_proc_die != NULL)
17922 return dwarf_proc_die;
17923
17924 /* DWARF procedures are available starting with the DWARFv3 standard. */
17925 if (dwarf_version < 3 && dwarf_strict)
17926 return NULL;
17927
17928 /* We handle only functions for which we still have a body, that return a
17929 supported type and that take arguments with supported types. Note that
17930 there is no point translating functions that return nothing. */
17931 if (tree_body == NULL_TREE
17932 || DECL_RESULT (fndecl) == NULL_TREE
17933 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17934 return NULL;
17935
17936 for (cursor = DECL_ARGUMENTS (fndecl);
17937 cursor != NULL_TREE;
17938 cursor = TREE_CHAIN (cursor))
17939 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17940 return NULL;
17941
17942 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17943 if (TREE_CODE (tree_body) != RETURN_EXPR)
17944 return NULL;
17945 tree_body = TREE_OPERAND (tree_body, 0);
17946 if (TREE_CODE (tree_body) != MODIFY_EXPR
17947 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17948 return NULL;
17949 tree_body = TREE_OPERAND (tree_body, 1);
17950
17951 /* Try to translate the body expression itself. Note that this will probably
17952 cause an infinite recursion if its call graph has a cycle. This is very
17953 unlikely for size functions, however, so don't bother with such things at
17954 the moment. */
17955 ctx.context_type = NULL_TREE;
17956 ctx.base_decl = NULL_TREE;
17957 ctx.dpi = &dpi;
17958 ctx.placeholder_arg = false;
17959 ctx.placeholder_seen = false;
17960 dpi.fndecl = fndecl;
17961 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17962 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17963 if (!loc_body)
17964 return NULL;
17965
17966 /* After evaluating all operands in "loc_body", we should still have on the
17967 stack all arguments plus the desired function result (top of the stack).
17968 Generate code in order to keep only the result in our stack frame. */
17969 epilogue = NULL;
17970 for (i = 0; i < dpi.args_count; ++i)
17971 {
17972 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17973 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17974 op_couple->dw_loc_next->dw_loc_next = epilogue;
17975 epilogue = op_couple;
17976 }
17977 add_loc_descr (&loc_body, epilogue);
17978 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17979 return NULL;
17980
17981 /* Trailing nops from loc_descriptor_from_tree (if any) could not be removed
17982 because they were considered useful. Now that there is an epilogue, they
17983 are not needed anymore, so give nop stripping another try. */
17984 loc_descr_without_nops (loc_body);
17985
17986 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17987 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
17988 though, given that size functions do not come from source, so they should
17989 not have a dedicated DW_TAG_subprogram DIE. */
17990 dwarf_proc_die
17991 = new_dwarf_proc_die (loc_body, fndecl,
17992 get_context_die (DECL_CONTEXT (fndecl)));
17993
17994 /* The called DWARF procedure consumes one stack slot per argument and
17995 returns one stack slot. */
17996 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17997
17998 return dwarf_proc_die;
17999 }
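
/* Illustrative sketch, kept disabled: how a caller can emit a DW_OP_call4 to
   the DWARF procedure built above for a hypothetical two-argument size
   function SZ_FNDECL, given location descriptions ARG0 and ARG1 for the
   actual arguments.  This mirrors the CALL_EXPR handling in
   loc_list_from_tree_1 below; the names are made up for the example.  */
#if 0
static dw_loc_descr_ref
example_call_size_function (tree sz_fndecl, dw_loc_descr_ref arg0,
			    dw_loc_descr_ref arg1)
{
  dw_loc_descr_ref ret = NULL, call_op;
  dw_die_ref dwarf_proc = function_to_dwarf_procedure (sz_fndecl);

  if (dwarf_proc == NULL)
    return NULL;

  /* Push the arguments right-to-left so that ARG0 ends up on top of the
     stack when the procedure starts.  */
  add_loc_descr (&ret, arg1);
  add_loc_descr (&ret, arg0);

  /* Then call the procedure: it consumes the two argument slots and leaves
     the result, as recorded in dwarf_proc_stack_usage_map (1 - 2 = -1).  */
  call_op = new_loc_descr (DW_OP_call4, 0, 0);
  call_op->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
  call_op->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
  call_op->dw_loc_oprnd1.v.val_die_ref.external = 0;
  add_loc_descr (&ret, call_op);
  return ret;
}
#endif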
18000
18001
18002 /* Generate Dwarf location list representing LOC.
18003 If WANT_ADDRESS is false, an expression computing the value of LOC will be
18004 returned. If WANT_ADDRESS is 1, an expression computing the address of LOC
18005 will be returned. If WANT_ADDRESS is 2, an expression computing an address
18006 usable in a location description will be returned (i.e. DW_OP_reg can be
18007 used to refer to register values).
18008
18009 CONTEXT provides information to customize the location descriptions
18010 generation. Its context_type field specifies what type is implicitly
18011 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18012 will not be generated.
18013
18014 Its DPI field determines whether we are generating a DWARF expression for a
18015 DWARF procedure, so PARM_DECL references are processed specifically.
18016
18017 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18018 and dpi fields were null. */
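/* A minimal illustration (not from the sources), assuming *P fits in an
   address-sized slot: for the tree *P, where P is a pointer variable,
   WANT_ADDRESS == 1 yields an expression computing the value of P, which is
   the address of *P, whereas WANT_ADDRESS == 0 additionally appends a
   DW_OP_deref so that the expression yields the value stored at that
   address.  */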
18019
18020 static dw_loc_list_ref
18021 loc_list_from_tree_1 (tree loc, int want_address,
18022 struct loc_descr_context *context)
18023 {
18024 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18025 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18026 int have_address = 0;
18027 enum dwarf_location_atom op;
18028
18029 /* ??? Most of the time we do not take proper care of sign/zero
18030 extending the values. Hopefully this won't be a real
18031 problem... */
18032
18033 if (context != NULL
18034 && context->base_decl == loc
18035 && want_address == 0)
18036 {
18037 if (dwarf_version >= 3 || !dwarf_strict)
18038 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18039 NULL, 0, NULL, 0, NULL);
18040 else
18041 return NULL;
18042 }
18043
18044 switch (TREE_CODE (loc))
18045 {
18046 case ERROR_MARK:
18047 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18048 return 0;
18049
18050 case PLACEHOLDER_EXPR:
18051 /* This case involves extracting fields from an object to determine the
18052 position of other fields. It is supposed to appear only as the first
18053 operand of COMPONENT_REF nodes and to reference precisely the type
18054 that the context allows. */
18055 if (context != NULL
18056 && TREE_TYPE (loc) == context->context_type
18057 && want_address >= 1)
18058 {
18059 if (dwarf_version >= 3 || !dwarf_strict)
18060 {
18061 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18062 have_address = 1;
18063 break;
18064 }
18065 else
18066 return NULL;
18067 }
18068 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18069 the single argument passed by consumer. */
18070 else if (context != NULL
18071 && context->placeholder_arg
18072 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18073 && want_address == 0)
18074 {
18075 ret = new_loc_descr (DW_OP_pick, 0, 0);
18076 ret->frame_offset_rel = 1;
18077 context->placeholder_seen = true;
18078 break;
18079 }
18080 else
18081 expansion_failed (loc, NULL_RTX,
18082 "PLACEHOLDER_EXPR for an unexpected type");
18083 break;
18084
18085 case CALL_EXPR:
18086 {
18087 const int nargs = call_expr_nargs (loc);
18088 tree callee = get_callee_fndecl (loc);
18089 int i;
18090 dw_die_ref dwarf_proc;
18091
18092 if (callee == NULL_TREE)
18093 goto call_expansion_failed;
18094
18095 /* We handle only functions that return an integer. */
18096 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18097 goto call_expansion_failed;
18098
18099 dwarf_proc = function_to_dwarf_procedure (callee);
18100 if (dwarf_proc == NULL)
18101 goto call_expansion_failed;
18102
18103 /* Evaluate arguments right-to-left so that the first argument will
18104 be the top-most one on the stack. */
18105 for (i = nargs - 1; i >= 0; --i)
18106 {
18107 dw_loc_descr_ref loc_descr
18108 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18109 context);
18110
18111 if (loc_descr == NULL)
18112 goto call_expansion_failed;
18113
18114 add_loc_descr (&ret, loc_descr);
18115 }
18116
18117 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18118 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18119 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18120 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18121 add_loc_descr (&ret, ret1);
18122 break;
18123
18124 call_expansion_failed:
18125 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18126 /* There are no opcodes for these operations. */
18127 return 0;
18128 }
18129
18130 case PREINCREMENT_EXPR:
18131 case PREDECREMENT_EXPR:
18132 case POSTINCREMENT_EXPR:
18133 case POSTDECREMENT_EXPR:
18134 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18135 /* There are no opcodes for these operations. */
18136 return 0;
18137
18138 case ADDR_EXPR:
18139 /* If we already want an address, see if there is INDIRECT_REF inside
18140 e.g. for &this->field. */
18141 if (want_address)
18142 {
18143 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18144 (loc, want_address == 2, context);
18145 if (list_ret)
18146 have_address = 1;
18147 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18148 && (ret = cst_pool_loc_descr (loc)))
18149 have_address = 1;
18150 }
18151 /* Otherwise, process the argument and look for the address. */
18152 if (!list_ret && !ret)
18153 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18154 else
18155 {
18156 if (want_address)
18157 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18158 return NULL;
18159 }
18160 break;
18161
18162 case VAR_DECL:
18163 if (DECL_THREAD_LOCAL_P (loc))
18164 {
18165 rtx rtl;
18166 enum dwarf_location_atom tls_op;
18167 enum dtprel_bool dtprel = dtprel_false;
18168
18169 if (targetm.have_tls)
18170 {
18171 /* If this is not defined, we have no way to emit the
18172 data. */
18173 if (!targetm.asm_out.output_dwarf_dtprel)
18174 return 0;
18175
18176 /* The way DW_OP_GNU_push_tls_address is specified, we
18177 can only look up addresses of objects in the current
18178 module. We used DW_OP_addr as first op, but that's
18179 wrong, because DW_OP_addr is relocated by the debug
18180 info consumer, while DW_OP_GNU_push_tls_address
18181 operand shouldn't be. */
18182 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18183 return 0;
18184 dtprel = dtprel_true;
18185 /* We check for DWARF 5 here because gdb did not implement
18186 DW_OP_form_tls_address until after 7.12. */
18187 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18188 : DW_OP_GNU_push_tls_address);
18189 }
18190 else
18191 {
18192 if (!targetm.emutls.debug_form_tls_address
18193 || !(dwarf_version >= 3 || !dwarf_strict))
18194 return 0;
18195 /* We stuffed the control variable into the DECL_VALUE_EXPR
18196 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18197 no longer appear in gimple code. We used the control
18198 variable specifically so that we could pick it up here. */
18199 loc = DECL_VALUE_EXPR (loc);
18200 tls_op = DW_OP_form_tls_address;
18201 }
18202
18203 rtl = rtl_for_decl_location (loc);
18204 if (rtl == NULL_RTX)
18205 return 0;
18206
18207 if (!MEM_P (rtl))
18208 return 0;
18209 rtl = XEXP (rtl, 0);
18210 if (! CONSTANT_P (rtl))
18211 return 0;
18212
18213 ret = new_addr_loc_descr (rtl, dtprel);
18214 ret1 = new_loc_descr (tls_op, 0, 0);
18215 add_loc_descr (&ret, ret1);
18216
18217 have_address = 1;
18218 break;
18219 }
18220 /* FALLTHRU */
18221
18222 case PARM_DECL:
18223 if (context != NULL && context->dpi != NULL
18224 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18225 {
18226 /* We are generating code for a DWARF procedure and we want to access
18227 one of its arguments: find the appropriate argument offset and let
18228 the resolve_args_picking pass compute the offset that complies
18229 with the stack frame size. */
18230 unsigned i = 0;
18231 tree cursor;
18232
18233 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18234 cursor != NULL_TREE && cursor != loc;
18235 cursor = TREE_CHAIN (cursor), ++i)
18236 ;
18237 /* If we are translating a DWARF procedure, all referenced parameters
18238 must belong to the current function. */
18239 gcc_assert (cursor != NULL_TREE);
18240
18241 ret = new_loc_descr (DW_OP_pick, i, 0);
18242 ret->frame_offset_rel = 1;
18243 break;
18244 }
18245 /* FALLTHRU */
18246
18247 case RESULT_DECL:
18248 if (DECL_HAS_VALUE_EXPR_P (loc))
18249 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18250 want_address, context);
18251 /* FALLTHRU */
18252
18253 case FUNCTION_DECL:
18254 {
18255 rtx rtl;
18256 var_loc_list *loc_list = lookup_decl_loc (loc);
18257
18258 if (loc_list && loc_list->first)
18259 {
18260 list_ret = dw_loc_list (loc_list, loc, want_address);
18261 have_address = want_address != 0;
18262 break;
18263 }
18264 rtl = rtl_for_decl_location (loc);
18265 if (rtl == NULL_RTX)
18266 {
18267 if (TREE_CODE (loc) != FUNCTION_DECL
18268 && early_dwarf
18269 && current_function_decl
18270 && want_address != 1
18271 && ! DECL_IGNORED_P (loc)
18272 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18273 || POINTER_TYPE_P (TREE_TYPE (loc)))
18274 && DECL_CONTEXT (loc) == current_function_decl
18275 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18276 <= DWARF2_ADDR_SIZE))
18277 {
18278 dw_die_ref ref = lookup_decl_die (loc);
18279 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18280 if (ref)
18281 {
18282 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18283 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18284 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18285 }
18286 else
18287 {
18288 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18289 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18290 }
18291 break;
18292 }
18293 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18294 return 0;
18295 }
18296 else if (CONST_INT_P (rtl))
18297 {
18298 HOST_WIDE_INT val = INTVAL (rtl);
18299 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18300 val &= GET_MODE_MASK (DECL_MODE (loc));
18301 ret = int_loc_descriptor (val);
18302 }
18303 else if (GET_CODE (rtl) == CONST_STRING)
18304 {
18305 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18306 return 0;
18307 }
18308 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18309 ret = new_addr_loc_descr (rtl, dtprel_false);
18310 else
18311 {
18312 machine_mode mode, mem_mode;
18313
18314 /* Certain constructs can only be represented at top-level. */
18315 if (want_address == 2)
18316 {
18317 ret = loc_descriptor (rtl, VOIDmode,
18318 VAR_INIT_STATUS_INITIALIZED);
18319 have_address = 1;
18320 }
18321 else
18322 {
18323 mode = GET_MODE (rtl);
18324 mem_mode = VOIDmode;
18325 if (MEM_P (rtl))
18326 {
18327 mem_mode = mode;
18328 mode = get_address_mode (rtl);
18329 rtl = XEXP (rtl, 0);
18330 have_address = 1;
18331 }
18332 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18333 VAR_INIT_STATUS_INITIALIZED);
18334 }
18335 if (!ret)
18336 expansion_failed (loc, rtl,
18337 "failed to produce loc descriptor for rtl");
18338 }
18339 }
18340 break;
18341
18342 case MEM_REF:
18343 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18344 {
18345 have_address = 1;
18346 goto do_plus;
18347 }
18348 /* Fallthru. */
18349 case INDIRECT_REF:
18350 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18351 have_address = 1;
18352 break;
18353
18354 case TARGET_MEM_REF:
18355 case SSA_NAME:
18356 case DEBUG_EXPR_DECL:
18357 return NULL;
18358
18359 case COMPOUND_EXPR:
18360 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18361 context);
18362
18363 CASE_CONVERT:
18364 case VIEW_CONVERT_EXPR:
18365 case SAVE_EXPR:
18366 case MODIFY_EXPR:
18367 case NON_LVALUE_EXPR:
18368 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18369 context);
18370
18371 case COMPONENT_REF:
18372 case BIT_FIELD_REF:
18373 case ARRAY_REF:
18374 case ARRAY_RANGE_REF:
18375 case REALPART_EXPR:
18376 case IMAGPART_EXPR:
18377 {
18378 tree obj, offset;
18379 poly_int64 bitsize, bitpos, bytepos;
18380 machine_mode mode;
18381 int unsignedp, reversep, volatilep = 0;
18382
18383 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18384 &unsignedp, &reversep, &volatilep);
18385
18386 gcc_assert (obj != loc);
18387
18388 list_ret = loc_list_from_tree_1 (obj,
18389 want_address == 2
18390 && known_eq (bitpos, 0)
18391 && !offset ? 2 : 1,
18392 context);
18393 /* TODO: We can extract value of the small expression via shifting even
18394 for nonzero bitpos. */
18395 if (list_ret == 0)
18396 return 0;
18397 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18398 || !multiple_p (bitsize, BITS_PER_UNIT))
18399 {
18400 expansion_failed (loc, NULL_RTX,
18401 "bitfield access");
18402 return 0;
18403 }
18404
18405 if (offset != NULL_TREE)
18406 {
18407 /* Variable offset. */
18408 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18409 if (list_ret1 == 0)
18410 return 0;
18411 add_loc_list (&list_ret, list_ret1);
18412 if (!list_ret)
18413 return 0;
18414 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18415 }
18416
18417 HOST_WIDE_INT value;
18418 if (bytepos.is_constant (&value) && value > 0)
18419 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18420 value, 0));
18421 else if (maybe_ne (bytepos, 0))
18422 loc_list_plus_const (list_ret, bytepos);
18423
18424 have_address = 1;
18425 break;
18426 }
18427
18428 case INTEGER_CST:
18429 if ((want_address || !tree_fits_shwi_p (loc))
18430 && (ret = cst_pool_loc_descr (loc)))
18431 have_address = 1;
18432 else if (want_address == 2
18433 && tree_fits_shwi_p (loc)
18434 && (ret = address_of_int_loc_descriptor
18435 (int_size_in_bytes (TREE_TYPE (loc)),
18436 tree_to_shwi (loc))))
18437 have_address = 1;
18438 else if (tree_fits_shwi_p (loc))
18439 ret = int_loc_descriptor (tree_to_shwi (loc));
18440 else if (tree_fits_uhwi_p (loc))
18441 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18442 else
18443 {
18444 expansion_failed (loc, NULL_RTX,
18445 "Integer operand is not host integer");
18446 return 0;
18447 }
18448 break;
18449
18450 case CONSTRUCTOR:
18451 case REAL_CST:
18452 case STRING_CST:
18453 case COMPLEX_CST:
18454 if ((ret = cst_pool_loc_descr (loc)))
18455 have_address = 1;
18456 else if (TREE_CODE (loc) == CONSTRUCTOR)
18457 {
18458 tree type = TREE_TYPE (loc);
18459 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18460 unsigned HOST_WIDE_INT offset = 0;
18461 unsigned HOST_WIDE_INT cnt;
18462 constructor_elt *ce;
18463
18464 if (TREE_CODE (type) == RECORD_TYPE)
18465 {
18466 /* This is very limited, but it's enough to output
18467 pointers to member functions, as long as the
18468 referenced function is defined in the current
18469 translation unit. */
18470 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18471 {
18472 tree val = ce->value;
18473
18474 tree field = ce->index;
18475
18476 if (val)
18477 STRIP_NOPS (val);
18478
18479 if (!field || DECL_BIT_FIELD (field))
18480 {
18481 expansion_failed (loc, NULL_RTX,
18482 "bitfield in record type constructor");
18483 size = offset = (unsigned HOST_WIDE_INT)-1;
18484 ret = NULL;
18485 break;
18486 }
18487
18488 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18489 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18490 gcc_assert (pos + fieldsize <= size);
18491 if (pos < offset)
18492 {
18493 expansion_failed (loc, NULL_RTX,
18494 "out-of-order fields in record constructor");
18495 size = offset = (unsigned HOST_WIDE_INT)-1;
18496 ret = NULL;
18497 break;
18498 }
18499 if (pos > offset)
18500 {
18501 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18502 add_loc_descr (&ret, ret1);
18503 offset = pos;
18504 }
18505 if (val && fieldsize != 0)
18506 {
18507 ret1 = loc_descriptor_from_tree (val, want_address, context);
18508 if (!ret1)
18509 {
18510 expansion_failed (loc, NULL_RTX,
18511 "unsupported expression in field");
18512 size = offset = (unsigned HOST_WIDE_INT)-1;
18513 ret = NULL;
18514 break;
18515 }
18516 add_loc_descr (&ret, ret1);
18517 }
18518 if (fieldsize)
18519 {
18520 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18521 add_loc_descr (&ret, ret1);
18522 offset = pos + fieldsize;
18523 }
18524 }
18525
18526 if (offset != size)
18527 {
18528 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18529 add_loc_descr (&ret, ret1);
18530 offset = size;
18531 }
18532
18533 have_address = !!want_address;
18534 }
18535 else
18536 expansion_failed (loc, NULL_RTX,
18537 "constructor of non-record type");
18538 }
18539 else
18540 /* We can construct small constants here using int_loc_descriptor. */
18541 expansion_failed (loc, NULL_RTX,
18542 "constructor or constant not in constant pool");
18543 break;
18544
18545 case TRUTH_AND_EXPR:
18546 case TRUTH_ANDIF_EXPR:
18547 case BIT_AND_EXPR:
18548 op = DW_OP_and;
18549 goto do_binop;
18550
18551 case TRUTH_XOR_EXPR:
18552 case BIT_XOR_EXPR:
18553 op = DW_OP_xor;
18554 goto do_binop;
18555
18556 case TRUTH_OR_EXPR:
18557 case TRUTH_ORIF_EXPR:
18558 case BIT_IOR_EXPR:
18559 op = DW_OP_or;
18560 goto do_binop;
18561
18562 case FLOOR_DIV_EXPR:
18563 case CEIL_DIV_EXPR:
18564 case ROUND_DIV_EXPR:
18565 case TRUNC_DIV_EXPR:
18566 case EXACT_DIV_EXPR:
18567 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18568 return 0;
18569 op = DW_OP_div;
18570 goto do_binop;
18571
18572 case MINUS_EXPR:
18573 op = DW_OP_minus;
18574 goto do_binop;
18575
18576 case FLOOR_MOD_EXPR:
18577 case CEIL_MOD_EXPR:
18578 case ROUND_MOD_EXPR:
18579 case TRUNC_MOD_EXPR:
18580 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18581 {
18582 op = DW_OP_mod;
18583 goto do_binop;
18584 }
18585 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18586 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18587 if (list_ret == 0 || list_ret1 == 0)
18588 return 0;
18589
18590 add_loc_list (&list_ret, list_ret1);
18591 if (list_ret == 0)
18592 return 0;
18593 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18594 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18595 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18596 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18597 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18598 break;
18599
18600 case MULT_EXPR:
18601 op = DW_OP_mul;
18602 goto do_binop;
18603
18604 case LSHIFT_EXPR:
18605 op = DW_OP_shl;
18606 goto do_binop;
18607
18608 case RSHIFT_EXPR:
18609 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18610 goto do_binop;
18611
18612 case POINTER_PLUS_EXPR:
18613 case PLUS_EXPR:
18614 do_plus:
18615 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18616 {
18617 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18618 smarter to encode their opposite. The DW_OP_plus_uconst operation
18619 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18620 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18621 bytes, Y being the size of the operation that pushes the opposite
18622 of the addend. So let's choose the smallest representation. */
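/* For example (illustrative, assuming DWARF2_ADDR_SIZE == 8): an addend of
   0xfffffffffffffff8, i.e. -8, would need a 10-byte ULEB128 operand for
   DW_OP_plus_uconst (11 bytes in total), whereas pushing the opposite with
   DW_OP_lit8 and then applying DW_OP_minus takes only 2 bytes.  */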
18623 const tree tree_addend = TREE_OPERAND (loc, 1);
18624 offset_int wi_addend;
18625 HOST_WIDE_INT shwi_addend;
18626 dw_loc_descr_ref loc_naddend;
18627
18628 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18629 if (list_ret == 0)
18630 return 0;
18631
18632 /* Try to get the literal to push. It is the opposite of the addend,
18633 so as we rely on wrapping during DWARF evaluation, first decode
18634 the literal as a "DWARF-sized" signed number. */
18635 wi_addend = wi::to_offset (tree_addend);
18636 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18637 shwi_addend = wi_addend.to_shwi ();
18638 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18639 ? int_loc_descriptor (-shwi_addend)
18640 : NULL;
18641
18642 if (loc_naddend != NULL
18643 && ((unsigned) size_of_uleb128 (shwi_addend)
18644 > size_of_loc_descr (loc_naddend)))
18645 {
18646 add_loc_descr_to_each (list_ret, loc_naddend);
18647 add_loc_descr_to_each (list_ret,
18648 new_loc_descr (DW_OP_minus, 0, 0));
18649 }
18650 else
18651 {
18652 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18653 {
18654 loc_naddend = loc_cur;
18655 loc_cur = loc_cur->dw_loc_next;
18656 ggc_free (loc_naddend);
18657 }
18658 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18659 }
18660 break;
18661 }
18662
18663 op = DW_OP_plus;
18664 goto do_binop;
18665
18666 case LE_EXPR:
18667 op = DW_OP_le;
18668 goto do_comp_binop;
18669
18670 case GE_EXPR:
18671 op = DW_OP_ge;
18672 goto do_comp_binop;
18673
18674 case LT_EXPR:
18675 op = DW_OP_lt;
18676 goto do_comp_binop;
18677
18678 case GT_EXPR:
18679 op = DW_OP_gt;
18680 goto do_comp_binop;
18681
18682 do_comp_binop:
18683 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18684 {
18685 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18686 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18687 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18688 TREE_CODE (loc));
18689 break;
18690 }
18691 else
18692 goto do_binop;
18693
18694 case EQ_EXPR:
18695 op = DW_OP_eq;
18696 goto do_binop;
18697
18698 case NE_EXPR:
18699 op = DW_OP_ne;
18700 goto do_binop;
18701
18702 do_binop:
18703 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18704 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18705 if (list_ret == 0 || list_ret1 == 0)
18706 return 0;
18707
18708 add_loc_list (&list_ret, list_ret1);
18709 if (list_ret == 0)
18710 return 0;
18711 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18712 break;
18713
18714 case TRUTH_NOT_EXPR:
18715 case BIT_NOT_EXPR:
18716 op = DW_OP_not;
18717 goto do_unop;
18718
18719 case ABS_EXPR:
18720 op = DW_OP_abs;
18721 goto do_unop;
18722
18723 case NEGATE_EXPR:
18724 op = DW_OP_neg;
18725 goto do_unop;
18726
18727 do_unop:
18728 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18729 if (list_ret == 0)
18730 return 0;
18731
18732 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18733 break;
18734
18735 case MIN_EXPR:
18736 case MAX_EXPR:
18737 {
18738 const enum tree_code code =
18739 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18740
18741 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18742 build2 (code, integer_type_node,
18743 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18744 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18745 }
18746
18747 /* fall through */
18748
18749 case COND_EXPR:
18750 {
18751 dw_loc_descr_ref lhs
18752 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18753 dw_loc_list_ref rhs
18754 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18755 dw_loc_descr_ref bra_node, jump_node, tmp;
18756
18757 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18758 if (list_ret == 0 || lhs == 0 || rhs == 0)
18759 return 0;
18760
18761 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18762 add_loc_descr_to_each (list_ret, bra_node);
18763
18764 add_loc_list (&list_ret, rhs);
18765 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18766 add_loc_descr_to_each (list_ret, jump_node);
18767
18768 add_loc_descr_to_each (list_ret, lhs);
18769 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18770 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18771
18772 /* ??? Need a node to point the skip at. Use a nop. */
18773 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18774 add_loc_descr_to_each (list_ret, tmp);
18775 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18776 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18777 }
18778 break;
18779
18780 case FIX_TRUNC_EXPR:
18781 return 0;
18782
18783 default:
18784 /* Leave front-end specific codes as simply unknown. This comes
18785 up, for instance, with the C STMT_EXPR. */
18786 if ((unsigned int) TREE_CODE (loc)
18787 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18788 {
18789 expansion_failed (loc, NULL_RTX,
18790 "language specific tree node");
18791 return 0;
18792 }
18793
18794 /* Otherwise this is a generic code; we should just list all of
18795 these explicitly. We forgot one. */
18796 if (flag_checking)
18797 gcc_unreachable ();
18798
18799 /* In a release build, we want to degrade gracefully: better to
18800 generate incomplete debugging information than to crash. */
18801 return NULL;
18802 }
18803
18804 if (!ret && !list_ret)
18805 return 0;
18806
18807 if (want_address == 2 && !have_address
18808 && (dwarf_version >= 4 || !dwarf_strict))
18809 {
18810 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18811 {
18812 expansion_failed (loc, NULL_RTX,
18813 "DWARF address size mismatch");
18814 return 0;
18815 }
18816 if (ret)
18817 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18818 else
18819 add_loc_descr_to_each (list_ret,
18820 new_loc_descr (DW_OP_stack_value, 0, 0));
18821 have_address = 1;
18822 }
18823 /* Show if we can't fill the request for an address. */
18824 if (want_address && !have_address)
18825 {
18826 expansion_failed (loc, NULL_RTX,
18827 "Want address and only have value");
18828 return 0;
18829 }
18830
18831 gcc_assert (!ret || !list_ret);
18832
18833 /* If we've got an address and don't want one, dereference. */
18834 if (!want_address && have_address)
18835 {
18836 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18837
18838 if (size > DWARF2_ADDR_SIZE || size == -1)
18839 {
18840 expansion_failed (loc, NULL_RTX,
18841 "DWARF address size mismatch");
18842 return 0;
18843 }
18844 else if (size == DWARF2_ADDR_SIZE)
18845 op = DW_OP_deref;
18846 else
18847 op = DW_OP_deref_size;
18848
18849 if (ret)
18850 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18851 else
18852 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18853 }
18854 if (ret)
18855 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18856
18857 return list_ret;
18858 }
18859
18860 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18861 expressions. */
18862
18863 static dw_loc_list_ref
18864 loc_list_from_tree (tree loc, int want_address,
18865 struct loc_descr_context *context)
18866 {
18867 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18868
18869 for (dw_loc_list_ref loc_cur = result;
18870 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18871 loc_descr_without_nops (loc_cur->expr);
18872 return result;
18873 }
18874
18875 /* Same as above but return only a single location expression. */
18876 static dw_loc_descr_ref
18877 loc_descriptor_from_tree (tree loc, int want_address,
18878 struct loc_descr_context *context)
18879 {
18880 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18881 if (!ret)
18882 return NULL;
18883 if (ret->dw_loc_next)
18884 {
18885 expansion_failed (loc, NULL_RTX,
18886 "Location list where only loc descriptor needed");
18887 return NULL;
18888 }
18889 return ret->expr;
18890 }
18891
18892 /* Given a value, round it up to the lowest multiple of `boundary'
18893 which is not less than the value itself. */
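/* For example (a minimal illustration), ceiling (13, 8) yields 16, and
   ceiling (16, 8) yields 16 as well.  */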
18894
18895 static inline HOST_WIDE_INT
18896 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18897 {
18898 return (((value + boundary - 1) / boundary) * boundary);
18899 }
18900
18901 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18902 pointer to the declared type for the relevant field variable, or return
18903 `integer_type_node' if the given node turns out to be an
18904 ERROR_MARK node. */
18905
18906 static inline tree
18907 field_type (const_tree decl)
18908 {
18909 tree type;
18910
18911 if (TREE_CODE (decl) == ERROR_MARK)
18912 return integer_type_node;
18913
18914 type = DECL_BIT_FIELD_TYPE (decl);
18915 if (type == NULL_TREE)
18916 type = TREE_TYPE (decl);
18917
18918 return type;
18919 }
18920
18921 /* Given a pointer to a tree node, return the alignment in bits for
18922 it, or else return BITS_PER_WORD if the node actually turns out to
18923 be an ERROR_MARK node. */
18924
18925 static inline unsigned
18926 simple_type_align_in_bits (const_tree type)
18927 {
18928 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18929 }
18930
18931 static inline unsigned
18932 simple_decl_align_in_bits (const_tree decl)
18933 {
18934 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18935 }
18936
18937 /* Return the result of rounding T up to ALIGN. */
18938
18939 static inline offset_int
18940 round_up_to_align (const offset_int &t, unsigned int align)
18941 {
18942 return wi::udiv_trunc (t + align - 1, align) * align;
18943 }
18944
18945 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18946 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18947 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18948 if we fail to return the size in one of these two forms. */
18949
18950 static dw_loc_descr_ref
18951 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18952 {
18953 tree tree_size;
18954 struct loc_descr_context ctx;
18955
18956 /* Prefer to return a constant integer, if possible. */
18957 *cst_size = int_size_in_bytes (type);
18958 if (*cst_size != -1)
18959 return NULL;
18960
18961 ctx.context_type = const_cast<tree> (type);
18962 ctx.base_decl = NULL_TREE;
18963 ctx.dpi = NULL;
18964 ctx.placeholder_arg = false;
18965 ctx.placeholder_seen = false;
18966
18967 type = TYPE_MAIN_VARIANT (type);
18968 tree_size = TYPE_SIZE_UNIT (type);
18969 return ((tree_size != NULL_TREE)
18970 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18971 : NULL);
18972 }
18973
18974 /* Helper structure for RECORD_TYPE processing. */
18975 struct vlr_context
18976 {
18977 /* Root RECORD_TYPE. It is needed to generate data member location
18978 descriptions in variable-length records (VLR), but also to cope with
18979 variants, which are composed of nested structures multiplexed with
18980 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18981 function processing a FIELD_DECL, it is required to be non-null. */
18982 tree struct_type;
18983 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18984 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18985 this variant part as part of the root record (in storage units). For
18986 regular records, it must be NULL_TREE. */
18987 tree variant_part_offset;
18988 };
18989
18990 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18991 addressed byte of the "containing object" for the given FIELD_DECL. If
18992 possible, return a native constant through CST_OFFSET (in which case NULL is
18993 returned); otherwise return a DWARF expression that computes the offset.
18994
18995 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18996 that offset is, either because the argument turns out to be a pointer to an
18997 ERROR_MARK node, or because the offset expression is too complex for us.
18998
18999 CTX is required: see the comment for VLR_CONTEXT. */
19000
19001 static dw_loc_descr_ref
19002 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19003 HOST_WIDE_INT *cst_offset)
19004 {
19005 tree tree_result;
19006 dw_loc_list_ref loc_result;
19007
19008 *cst_offset = 0;
19009
19010 if (TREE_CODE (decl) == ERROR_MARK)
19011 return NULL;
19012 else
19013 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19014
19015 /* We cannot handle variable bit offsets at the moment, so abort if that is
19016 the case. */
19017 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19018 return NULL;
19019
19020 #ifdef PCC_BITFIELD_TYPE_MATTERS
19021 /* We used to handle only constant offsets in all cases. Now, we handle
19022 dynamic byte offsets properly only when PCC bitfield type doesn't
19023 matter. */
19024 if (PCC_BITFIELD_TYPE_MATTERS
19025 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19026 {
19027 offset_int object_offset_in_bits;
19028 offset_int object_offset_in_bytes;
19029 offset_int bitpos_int;
19030 tree type;
19031 tree field_size_tree;
19032 offset_int deepest_bitpos;
19033 offset_int field_size_in_bits;
19034 unsigned int type_align_in_bits;
19035 unsigned int decl_align_in_bits;
19036 offset_int type_size_in_bits;
19037
19038 bitpos_int = wi::to_offset (bit_position (decl));
19039 type = field_type (decl);
19040 type_size_in_bits = offset_int_type_size_in_bits (type);
19041 type_align_in_bits = simple_type_align_in_bits (type);
19042
19043 field_size_tree = DECL_SIZE (decl);
19044
19045 /* The size could be unspecified if there was an error, or for
19046 a flexible array member. */
19047 if (!field_size_tree)
19048 field_size_tree = bitsize_zero_node;
19049
19050 /* If the size of the field is not constant, use the type size. */
19051 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19052 field_size_in_bits = wi::to_offset (field_size_tree);
19053 else
19054 field_size_in_bits = type_size_in_bits;
19055
19056 decl_align_in_bits = simple_decl_align_in_bits (decl);
19057
19058 /* The GCC front-end doesn't make any attempt to keep track of the
19059 starting bit offset (relative to the start of the containing
19060 structure type) of the hypothetical "containing object" for a
19061 bit-field. Thus, when computing the byte offset value for the
19062 start of the "containing object" of a bit-field, we must deduce
19063 this information on our own. This can be rather tricky to do in
19064 some cases. For example, handling the following structure type
19065 definition when compiling for an i386/i486 target (which only
19066 aligns long long's to 32-bit boundaries) can be very tricky:
19067
19068 struct S { int field1; long long field2:31; };
19069
19070 Fortunately, there is a simple rule-of-thumb which can be used
19071 in such cases. When compiling for an i386/i486, GCC will
19072 allocate 8 bytes for the structure shown above. It decides to
19073 do this based upon one simple rule for bit-field allocation.
19074 GCC allocates each "containing object" for each bit-field at
19075 the first (i.e. lowest addressed) legitimate alignment boundary
19076 (based upon the required minimum alignment for the declared
19077 type of the field) which it can possibly use, subject to the
19078 condition that there is still enough available space remaining
19079 in the containing object (when allocated at the selected point)
19080 to fully accommodate all of the bits of the bit-field itself.
19081
19082 This simple rule makes it obvious why GCC allocates 8 bytes for
19083 each object of the structure type shown above. When looking
19084 for a place to allocate the "containing object" for `field2',
19085 the compiler simply tries to allocate a 64-bit "containing
19086 object" at each successive 32-bit boundary (starting at zero)
19087 until it finds a place to allocate that 64-bit field such that
19088 at least 31 contiguous (and previously unallocated) bits remain
19089 within that selected 64 bit field. (As it turns out, for the
19090 example above, the compiler finds it is OK to allocate the
19091 "containing object" 64-bit field at bit-offset zero within the
19092 structure type.)
19093
19094 Here we attempt to work backwards from the limited set of facts
19095 we're given, and we try to deduce from those facts, where GCC
19096 must have believed that the containing object started (within
19097 the structure type). The value we deduce is then used (by the
19098 callers of this routine) to generate DW_AT_location and
19099 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19100 the case of DW_AT_location, regular fields as well). */
19101
19102 /* Figure out the bit-distance from the start of the structure to
19103 the "deepest" bit of the bit-field. */
19104 deepest_bitpos = bitpos_int + field_size_in_bits;
19105
19106 /* This is the tricky part. Use some fancy footwork to deduce
19107 where the lowest addressed bit of the containing object must
19108 be. */
19109 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19110
19111 /* Round up to type_align by default. This works best for
19112 bitfields. */
19113 object_offset_in_bits
19114 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19115
19116 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19117 {
19118 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19119
19120 /* Round up to decl_align instead. */
19121 object_offset_in_bits
19122 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19123 }
19124
19125 object_offset_in_bytes
19126 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19127 if (ctx->variant_part_offset == NULL_TREE)
19128 {
19129 *cst_offset = object_offset_in_bytes.to_shwi ();
19130 return NULL;
19131 }
19132 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19133 }
19134 else
19135 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19136 tree_result = byte_position (decl);
19137
19138 if (ctx->variant_part_offset != NULL_TREE)
19139 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19140 ctx->variant_part_offset, tree_result);
19141
19142 /* If the byte offset is a constant, it's simpler to handle a native
19143 constant rather than a DWARF expression. */
19144 if (TREE_CODE (tree_result) == INTEGER_CST)
19145 {
19146 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19147 return NULL;
19148 }
19149 struct loc_descr_context loc_ctx = {
19150 ctx->struct_type, /* context_type */
19151 NULL_TREE, /* base_decl */
19152 NULL, /* dpi */
19153 false, /* placeholder_arg */
19154 false /* placeholder_seen */
19155 };
19156 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19157
19158 /* We want a DWARF expression: abort if we only have a location list with
19159 multiple elements. */
19160 if (!loc_result || !single_element_loc_list_p (loc_result))
19161 return NULL;
19162 else
19163 return loc_result->expr;
19164 }
19165 \f
19166 /* The following routines define various Dwarf attributes and any data
19167 associated with them. */
19168
19169 /* Add a location description attribute value to a DIE.
19170
19171 This emits location attributes suitable for whole variables and
19172 whole parameters. Note that the location attributes for struct fields are
19173 generated by the routine `data_member_location_attribute' below. */
19174
19175 static inline void
19176 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19177 dw_loc_list_ref descr)
19178 {
19179 bool check_no_locviews = true;
19180 if (descr == 0)
19181 return;
19182 if (single_element_loc_list_p (descr))
19183 add_AT_loc (die, attr_kind, descr->expr);
19184 else
19185 {
19186 add_AT_loc_list (die, attr_kind, descr);
19187 gcc_assert (descr->ll_symbol);
19188 if (attr_kind == DW_AT_location && descr->vl_symbol
19189 && dwarf2out_locviews_in_attribute ())
19190 {
19191 add_AT_view_list (die, DW_AT_GNU_locviews);
19192 check_no_locviews = false;
19193 }
19194 }
19195
19196 if (check_no_locviews)
19197 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19198 }
19199
19200 /* Add DW_AT_accessibility attribute to DIE if needed. */
19201
19202 static void
19203 add_accessibility_attribute (dw_die_ref die, tree decl)
19204 {
19205 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19206 children, otherwise the default is DW_ACCESS_public. In DWARF2
19207 the default has always been DW_ACCESS_public. */
19208 if (TREE_PROTECTED (decl))
19209 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19210 else if (TREE_PRIVATE (decl))
19211 {
19212 if (dwarf_version == 2
19213 || die->die_parent == NULL
19214 || die->die_parent->die_tag != DW_TAG_class_type)
19215 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19216 }
19217 else if (dwarf_version > 2
19218 && die->die_parent
19219 && die->die_parent->die_tag == DW_TAG_class_type)
19220 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19221 }
19222
19223 /* Attach the specialized form of location attribute used for data members of
19224 struct and union types. In the special case of a FIELD_DECL node which
19225 represents a bit-field, the "offset" part of this special location
19226 descriptor must indicate the distance in bytes from the lowest-addressed
19227 byte of the containing struct or union type to the lowest-addressed byte of
19228 the "containing object" for the bit-field. (See the `field_byte_offset'
19229 function above).
19230
19231 For any given bit-field, the "containing object" is a hypothetical object
19232 (of some integral or enum type) within which the given bit-field lives. The
19233 type of this hypothetical "containing object" is always the same as the
19234 declared type of the individual bit-field itself (for GCC anyway... the
19235 DWARF spec doesn't actually mandate this). Note that it is the size (in
19236 bytes) of the hypothetical "containing object" which will be given in the
19237 DW_AT_byte_size attribute for this bit-field. (See the
19238 `byte_size_attribute' function below.) It is also used when calculating the
19239 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19240 function below.)
19241
19242 CTX is required: see the comment for VLR_CONTEXT. */
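   An illustrative sketch (hypothetical layout; the exact numbers are
   target dependent): given

     struct s { unsigned a : 3; unsigned b : 7; };

   the "containing object" for both A and B is typically a hypothetical
   unsigned int, so both bit-field DIEs get DW_AT_byte_size 4, and their
   data member location describes where that unsigned int starts within
   S rather than where the bits themselves start.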
19243
19244 static void
19245 add_data_member_location_attribute (dw_die_ref die,
19246 tree decl,
19247 struct vlr_context *ctx)
19248 {
19249 HOST_WIDE_INT offset;
19250 dw_loc_descr_ref loc_descr = 0;
19251
19252 if (TREE_CODE (decl) == TREE_BINFO)
19253 {
19254 /* We're working on the TAG_inheritance for a base class. */
19255 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19256 {
19257 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19258 aren't at a fixed offset from all (sub)objects of the same
19259 type. We need to extract the appropriate offset from our
19260 vtable. The following dwarf expression means
19261
19262 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19263
19264 This is specific to the V3 ABI, of course. */
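	  /* A rough sketch of the expression assembled below (the opcode
	     actually chosen for the constant depends on int_loc_descriptor):

	       DW_OP_dup       push a second copy of the object address
	       DW_OP_deref     load the vtable address
	       DW_OP_constu    push the distance back to the offset slot
	       DW_OP_minus     compute the address of that slot
	       DW_OP_deref     load the virtual base offset
	       DW_OP_plus      add it to the object address.  */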
19265
19266 dw_loc_descr_ref tmp;
19267
19268 /* Make a copy of the object address. */
19269 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19270 add_loc_descr (&loc_descr, tmp);
19271
19272 /* Extract the vtable address. */
19273 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19274 add_loc_descr (&loc_descr, tmp);
19275
19276 /* Calculate the address of the offset. */
19277 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19278 gcc_assert (offset < 0);
19279
19280 tmp = int_loc_descriptor (-offset);
19281 add_loc_descr (&loc_descr, tmp);
19282 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19283 add_loc_descr (&loc_descr, tmp);
19284
19285 /* Extract the offset. */
19286 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19287 add_loc_descr (&loc_descr, tmp);
19288
19289 /* Add it to the object address. */
19290 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19291 add_loc_descr (&loc_descr, tmp);
19292 }
19293 else
19294 offset = tree_to_shwi (BINFO_OFFSET (decl));
19295 }
19296 else
19297 {
19298 loc_descr = field_byte_offset (decl, ctx, &offset);
19299
19300 /* If loc_descr is available then we know the field offset is dynamic.
19301 However, GDB does not handle dynamic field offsets very well at the
19302 moment. */
19303 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19304 {
19305 loc_descr = NULL;
19306 offset = 0;
19307 }
19308
19309       /* Data member location evaluation starts with the base address on the
19310 stack. Compute the field offset and add it to this base address. */
19311 else if (loc_descr != NULL)
19312 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19313 }
19314
19315 if (! loc_descr)
19316 {
19317       /* While DW_AT_data_bit_offset was already added in DWARF4,
19318 	 GDB, for example, only added support for it in November 2016.  For
19319 	 DWARF5 we need newer debug info consumers anyway.  We might change
19320 	 this to dwarf_version >= 4 once most consumers have caught up.  */
19321 if (dwarf_version >= 5
19322 && TREE_CODE (decl) == FIELD_DECL
19323 && DECL_BIT_FIELD_TYPE (decl))
19324 {
19325 tree off = bit_position (decl);
19326 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19327 {
19328 remove_AT (die, DW_AT_byte_size);
19329 remove_AT (die, DW_AT_bit_offset);
19330 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19331 return;
19332 }
19333 }
19334 if (dwarf_version > 2)
19335 {
19336 /* Don't need to output a location expression, just the constant. */
19337 if (offset < 0)
19338 add_AT_int (die, DW_AT_data_member_location, offset);
19339 else
19340 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19341 return;
19342 }
19343 else
19344 {
19345 enum dwarf_location_atom op;
19346
19347 /* The DWARF2 standard says that we should assume that the structure
19348 address is already on the stack, so we can specify a structure
19349 field address by using DW_OP_plus_uconst. */
19350 op = DW_OP_plus_uconst;
19351 loc_descr = new_loc_descr (op, offset, 0);
19352 }
19353 }
19354
19355 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19356 }
19357
19358 /* Writes integer values to dw_vec_const array. */
19359
19360 static void
19361 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19362 {
19363 while (size != 0)
19364 {
19365 *dest++ = val & 0xff;
19366 val >>= 8;
19367 --size;
19368 }
19369 }
19370
19371 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19372
19373 static HOST_WIDE_INT
19374 extract_int (const unsigned char *src, unsigned int size)
19375 {
19376 HOST_WIDE_INT val = 0;
19377
19378 src += size;
19379 while (size != 0)
19380 {
19381 val <<= 8;
19382 val |= *--src & 0xff;
19383 --size;
19384 }
19385 return val;
19386 }
19387
19388 /* Writes wide_int values to dw_vec_const array. */
19389
19390 static void
19391 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19392 {
19393 int i;
19394
19395 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19396 {
19397 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19398 return;
19399 }
19400
19401 /* We'd have to extend this code to support odd sizes. */
19402 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19403
19404 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19405
19406 if (WORDS_BIG_ENDIAN)
19407 for (i = n - 1; i >= 0; i--)
19408 {
19409 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19410 dest += sizeof (HOST_WIDE_INT);
19411 }
19412 else
19413 for (i = 0; i < n; i++)
19414 {
19415 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19416 dest += sizeof (HOST_WIDE_INT);
19417 }
19418 }
19419
19420 /* Writes floating point values to dw_vec_const array. */
19421
19422 static void
19423 insert_float (const_rtx rtl, unsigned char *array)
19424 {
19425 long val[4];
19426 int i;
19427 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19428
19429 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19430
19431 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19432 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19433 {
19434 insert_int (val[i], 4, array);
19435 array += 4;
19436 }
19437 }
19438
19439 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19440 does not have a "location" either in memory or in a register. These
19441 things can arise in GNU C when a constant is passed as an actual parameter
19442 to an inlined function. They can also arise in C++ where declared
19443 constants do not necessarily get memory "homes". */
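   A purely illustrative case: when a call such as f (42) is inlined and the
   formal parameter of F ends up with no runtime location at all, its DIE can
   carry DW_AT_const_value 42 instead of a DW_AT_location attribute.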
19444
19445 static bool
19446 add_const_value_attribute (dw_die_ref die, rtx rtl)
19447 {
19448 switch (GET_CODE (rtl))
19449 {
19450 case CONST_INT:
19451 {
19452 HOST_WIDE_INT val = INTVAL (rtl);
19453
19454 if (val < 0)
19455 add_AT_int (die, DW_AT_const_value, val);
19456 else
19457 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19458 }
19459 return true;
19460
19461 case CONST_WIDE_INT:
19462 {
19463 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19464 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19465 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19466 wide_int w = wi::zext (w1, prec);
19467 add_AT_wide (die, DW_AT_const_value, w);
19468 }
19469 return true;
19470
19471 case CONST_DOUBLE:
19472 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19473 floating-point constant. A CONST_DOUBLE is used whenever the
19474 constant requires more than one word in order to be adequately
19475 represented. */
19476 if (TARGET_SUPPORTS_WIDE_INT == 0
19477 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19478 add_AT_double (die, DW_AT_const_value,
19479 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19480 else
19481 {
19482 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19483 unsigned int length = GET_MODE_SIZE (mode);
19484 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19485
19486 insert_float (rtl, array);
19487 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19488 }
19489 return true;
19490
19491 case CONST_VECTOR:
19492 {
19493 unsigned int length;
19494 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19495 return false;
19496
19497 machine_mode mode = GET_MODE (rtl);
19498 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19499 unsigned char *array
19500 = ggc_vec_alloc<unsigned char> (length * elt_size);
19501 unsigned int i;
19502 unsigned char *p;
19503 machine_mode imode = GET_MODE_INNER (mode);
19504
19505 switch (GET_MODE_CLASS (mode))
19506 {
19507 case MODE_VECTOR_INT:
19508 for (i = 0, p = array; i < length; i++, p += elt_size)
19509 {
19510 rtx elt = CONST_VECTOR_ELT (rtl, i);
19511 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19512 }
19513 break;
19514
19515 case MODE_VECTOR_FLOAT:
19516 for (i = 0, p = array; i < length; i++, p += elt_size)
19517 {
19518 rtx elt = CONST_VECTOR_ELT (rtl, i);
19519 insert_float (elt, p);
19520 }
19521 break;
19522
19523 default:
19524 gcc_unreachable ();
19525 }
19526
19527 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19528 }
19529 return true;
19530
19531 case CONST_STRING:
19532 if (dwarf_version >= 4 || !dwarf_strict)
19533 {
19534 dw_loc_descr_ref loc_result;
19535 resolve_one_addr (&rtl);
19536 rtl_addr:
19537 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19538 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19539 add_AT_loc (die, DW_AT_location, loc_result);
19540 vec_safe_push (used_rtx_array, rtl);
19541 return true;
19542 }
19543 return false;
19544
19545 case CONST:
19546 if (CONSTANT_P (XEXP (rtl, 0)))
19547 return add_const_value_attribute (die, XEXP (rtl, 0));
19548 /* FALLTHROUGH */
19549 case SYMBOL_REF:
19550 if (!const_ok_for_output (rtl))
19551 return false;
19552 /* FALLTHROUGH */
19553 case LABEL_REF:
19554 if (dwarf_version >= 4 || !dwarf_strict)
19555 goto rtl_addr;
19556 return false;
19557
19558 case PLUS:
19559 /* In cases where an inlined instance of an inline function is passed
19560 the address of an `auto' variable (which is local to the caller) we
19561 can get a situation where the DECL_RTL of the artificial local
19562 variable (for the inlining) which acts as a stand-in for the
19563 corresponding formal parameter (of the inline function) will look
19564 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19565 exactly a compile-time constant expression, but it isn't the address
19566 of the (artificial) local variable either. Rather, it represents the
19567 *value* which the artificial local variable always has during its
19568 lifetime. We currently have no way to represent such quasi-constant
19569 values in Dwarf, so for now we just punt and generate nothing. */
19570 return false;
19571
19572 case HIGH:
19573 case CONST_FIXED:
19574 return false;
19575
19576 case MEM:
19577 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19578 && MEM_READONLY_P (rtl)
19579 && GET_MODE (rtl) == BLKmode)
19580 {
19581 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19582 return true;
19583 }
19584 return false;
19585
19586 default:
19587 /* No other kinds of rtx should be possible here. */
19588 gcc_unreachable ();
19589 }
19590 return false;
19591 }
19592
19593 /* Determine whether the evaluation of EXPR references any variables
19594 or functions which aren't otherwise used (and therefore may not be
19595 output). */
19596 static tree
19597 reference_to_unused (tree * tp, int * walk_subtrees,
19598 void * data ATTRIBUTE_UNUSED)
19599 {
19600 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19601 *walk_subtrees = 0;
19602
19603 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19604 && ! TREE_ASM_WRITTEN (*tp))
19605 return *tp;
19606 /* ??? The C++ FE emits debug information for using decls, so
19607 putting gcc_unreachable here falls over. See PR31899. For now
19608 be conservative. */
19609 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19610 return *tp;
19611 else if (VAR_P (*tp))
19612 {
19613 varpool_node *node = varpool_node::get (*tp);
19614 if (!node || !node->definition)
19615 return *tp;
19616 }
19617 else if (TREE_CODE (*tp) == FUNCTION_DECL
19618 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19619 {
19620 /* The call graph machinery must have finished analyzing,
19621 optimizing and gimplifying the CU by now.
19622 So if *TP has no call graph node associated
19623 	 with it, *TP will not be emitted.  */
19624 if (!cgraph_node::get (*tp))
19625 return *tp;
19626 }
19627 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19628 return *tp;
19629
19630 return NULL_TREE;
19631 }
19632
19633 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19634 for use in a later add_const_value_attribute call. */
19635
19636 static rtx
19637 rtl_for_decl_init (tree init, tree type)
19638 {
19639 rtx rtl = NULL_RTX;
19640
19641 STRIP_NOPS (init);
19642
19643 /* If a variable is initialized with a string constant without embedded
19644 zeros, build CONST_STRING. */
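  /* As an illustration (not an exhaustive description of the checks below),
     an initializer like

       char buf[6] = "hello";

     qualifies, whereas "he\0lo" would not, because its TREE_STRING_LENGTH
     no longer matches strlen () + 1.  */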
19645 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19646 {
19647 tree enttype = TREE_TYPE (type);
19648 tree domain = TYPE_DOMAIN (type);
19649 scalar_int_mode mode;
19650
19651 if (is_int_mode (TYPE_MODE (enttype), &mode)
19652 && GET_MODE_SIZE (mode) == 1
19653 && domain
19654 && TYPE_MAX_VALUE (domain)
19655 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19656 && integer_zerop (TYPE_MIN_VALUE (domain))
19657 && compare_tree_int (TYPE_MAX_VALUE (domain),
19658 TREE_STRING_LENGTH (init) - 1) == 0
19659 && ((size_t) TREE_STRING_LENGTH (init)
19660 == strlen (TREE_STRING_POINTER (init)) + 1))
19661 {
19662 rtl = gen_rtx_CONST_STRING (VOIDmode,
19663 ggc_strdup (TREE_STRING_POINTER (init)));
19664 rtl = gen_rtx_MEM (BLKmode, rtl);
19665 MEM_READONLY_P (rtl) = 1;
19666 }
19667 }
19668 /* Other aggregates, and complex values, could be represented using
19669 CONCAT: FIXME! */
19670 else if (AGGREGATE_TYPE_P (type)
19671 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19672 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19673 || TREE_CODE (type) == COMPLEX_TYPE)
19674 ;
19675 /* Vectors only work if their mode is supported by the target.
19676 FIXME: generic vectors ought to work too. */
19677 else if (TREE_CODE (type) == VECTOR_TYPE
19678 && !VECTOR_MODE_P (TYPE_MODE (type)))
19679 ;
19680 /* If the initializer is something that we know will expand into an
19681 immediate RTL constant, expand it now. We must be careful not to
19682 reference variables which won't be output. */
19683 else if (initializer_constant_valid_p (init, type)
19684 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19685 {
19686 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19687 possible. */
19688 if (TREE_CODE (type) == VECTOR_TYPE)
19689 switch (TREE_CODE (init))
19690 {
19691 case VECTOR_CST:
19692 break;
19693 case CONSTRUCTOR:
19694 if (TREE_CONSTANT (init))
19695 {
19696 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19697 bool constant_p = true;
19698 tree value;
19699 unsigned HOST_WIDE_INT ix;
19700
19701 /* Even when ctor is constant, it might contain non-*_CST
19702 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19703 		   belong in VECTOR_CST nodes.  */
19704 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19705 if (!CONSTANT_CLASS_P (value))
19706 {
19707 constant_p = false;
19708 break;
19709 }
19710
19711 if (constant_p)
19712 {
19713 init = build_vector_from_ctor (type, elts);
19714 break;
19715 }
19716 }
19717 /* FALLTHRU */
19718
19719 default:
19720 return NULL;
19721 }
19722
19723 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19724
19725 /* If expand_expr returns a MEM, it wasn't immediate. */
19726 gcc_assert (!rtl || !MEM_P (rtl));
19727 }
19728
19729 return rtl;
19730 }
19731
19732 /* Generate RTL for the variable DECL to represent its location. */
19733
19734 static rtx
19735 rtl_for_decl_location (tree decl)
19736 {
19737 rtx rtl;
19738
19739 /* Here we have to decide where we are going to say the parameter "lives"
19740 (as far as the debugger is concerned). We only have a couple of
19741 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19742
19743 DECL_RTL normally indicates where the parameter lives during most of the
19744 activation of the function. If optimization is enabled however, this
19745 could be either NULL or else a pseudo-reg. Both of those cases indicate
19746 that the parameter doesn't really live anywhere (as far as the code
19747 generation parts of GCC are concerned) during most of the function's
19748 activation. That will happen (for example) if the parameter is never
19749 referenced within the function.
19750
19751 We could just generate a location descriptor here for all non-NULL
19752 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19753 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19754 where DECL_RTL is NULL or is a pseudo-reg.
19755
19756 Note however that we can only get away with using DECL_INCOMING_RTL as
19757 a backup substitute for DECL_RTL in certain limited cases. In cases
19758 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19759 we can be sure that the parameter was passed using the same type as it is
19760 declared to have within the function, and that its DECL_INCOMING_RTL
19761 points us to a place where a value of that type is passed.
19762
19763 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19764 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19765 because in these cases DECL_INCOMING_RTL points us to a value of some
19766 type which is *different* from the type of the parameter itself. Thus,
19767 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19768 such cases, the debugger would end up (for example) trying to fetch a
19769 `float' from a place which actually contains the first part of a
19770 `double'. That would lead to really incorrect and confusing
19771 output at debug-time.
19772
19773 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19774 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19775 are a couple of exceptions however. On little-endian machines we can
19776 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19777 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19778 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19779 when (on a little-endian machine) a non-prototyped function has a
19780 parameter declared to be of type `short' or `char'. In such cases,
19781 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19782 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19783 passed `int' value. If the debugger then uses that address to fetch
19784 a `short' or a `char' (on a little-endian machine) the result will be
19785 the correct data, so we allow for such exceptional cases below.
19786
19787 Note that our goal here is to describe the place where the given formal
19788 parameter lives during most of the function's activation (i.e. between the
19789 end of the prologue and the start of the epilogue). We'll do that as best
19790 as we can. Note however that if the given formal parameter is modified
19791 sometime during the execution of the function, then a stack backtrace (at
19792 debug-time) will show the function as having been called with the *new*
19793 value rather than the value which was originally passed in. This happens
19794 rarely enough that it is not a major problem, but it *is* a problem, and
19795 I'd like to fix it.
19796
19797 A future version of dwarf2out.c may generate two additional attributes for
19798 any given DW_TAG_formal_parameter DIE which will describe the "passed
19799 type" and the "passed location" for the given formal parameter in addition
19800 to the attributes we now generate to indicate the "declared type" and the
19801 "active location" for each parameter. This additional set of attributes
19802 could be used by debuggers for stack backtraces. Separately, note that
19803 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19804 This happens (for example) for inlined-instances of inline function formal
19805 parameters which are never referenced. This really shouldn't be
19806 happening. All PARM_DECL nodes should get valid non-NULL
19807 DECL_INCOMING_RTL values. FIXME. */
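  /* An illustrative (hypothetical) instance of the little-endian exception
     described above:

	void f (c)
	     char c;
	{ ... }

     Here TREE_TYPE (decl) is char while DECL_ARG_TYPE (decl) is int, and
     DECL_INCOMING_RTL refers to the promoted int; on a little-endian target
     its lowest-addressed byte still holds the char value, so falling back
     to it is acceptable.  */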
19808
19809 /* Use DECL_RTL as the "location" unless we find something better. */
19810 rtl = DECL_RTL_IF_SET (decl);
19811
19812 /* When generating abstract instances, ignore everything except
19813 constants, symbols living in memory, and symbols living in
19814 fixed registers. */
19815 if (! reload_completed)
19816 {
19817 if (rtl
19818 && (CONSTANT_P (rtl)
19819 || (MEM_P (rtl)
19820 && CONSTANT_P (XEXP (rtl, 0)))
19821 || (REG_P (rtl)
19822 && VAR_P (decl)
19823 && TREE_STATIC (decl))))
19824 {
19825 rtl = targetm.delegitimize_address (rtl);
19826 return rtl;
19827 }
19828 rtl = NULL_RTX;
19829 }
19830 else if (TREE_CODE (decl) == PARM_DECL)
19831 {
19832 if (rtl == NULL_RTX
19833 || is_pseudo_reg (rtl)
19834 || (MEM_P (rtl)
19835 && is_pseudo_reg (XEXP (rtl, 0))
19836 && DECL_INCOMING_RTL (decl)
19837 && MEM_P (DECL_INCOMING_RTL (decl))
19838 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19839 {
19840 tree declared_type = TREE_TYPE (decl);
19841 tree passed_type = DECL_ARG_TYPE (decl);
19842 machine_mode dmode = TYPE_MODE (declared_type);
19843 machine_mode pmode = TYPE_MODE (passed_type);
19844
19845 /* This decl represents a formal parameter which was optimized out.
19846 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19847 all cases where (rtl == NULL_RTX) just below. */
19848 if (dmode == pmode)
19849 rtl = DECL_INCOMING_RTL (decl);
19850 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19851 && SCALAR_INT_MODE_P (dmode)
19852 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19853 && DECL_INCOMING_RTL (decl))
19854 {
19855 rtx inc = DECL_INCOMING_RTL (decl);
19856 if (REG_P (inc))
19857 rtl = inc;
19858 else if (MEM_P (inc))
19859 {
19860 if (BYTES_BIG_ENDIAN)
19861 rtl = adjust_address_nv (inc, dmode,
19862 GET_MODE_SIZE (pmode)
19863 - GET_MODE_SIZE (dmode));
19864 else
19865 rtl = inc;
19866 }
19867 }
19868 }
19869
19870 /* If the parm was passed in registers, but lives on the stack, then
19871 make a big endian correction if the mode of the type of the
19872 parameter is not the same as the mode of the rtl. */
19873 /* ??? This is the same series of checks that are made in dbxout.c before
19874 we reach the big endian correction code there. It isn't clear if all
19875 of these checks are necessary here, but keeping them all is the safe
19876 thing to do. */
19877 else if (MEM_P (rtl)
19878 && XEXP (rtl, 0) != const0_rtx
19879 && ! CONSTANT_P (XEXP (rtl, 0))
19880 /* Not passed in memory. */
19881 && !MEM_P (DECL_INCOMING_RTL (decl))
19882 /* Not passed by invisible reference. */
19883 && (!REG_P (XEXP (rtl, 0))
19884 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19885 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19886 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19887 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19888 #endif
19889 )
19890 /* Big endian correction check. */
19891 && BYTES_BIG_ENDIAN
19892 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19893 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19894 UNITS_PER_WORD))
19895 {
19896 machine_mode addr_mode = get_address_mode (rtl);
19897 poly_int64 offset = (UNITS_PER_WORD
19898 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19899
19900 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19901 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19902 }
19903 }
19904 else if (VAR_P (decl)
19905 && rtl
19906 && MEM_P (rtl)
19907 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19908 {
19909 machine_mode addr_mode = get_address_mode (rtl);
19910 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19911 GET_MODE (rtl));
19912
19913 /* If a variable is declared "register" yet is smaller than
19914 a register, then if we store the variable to memory, it
19915 looks like we're storing a register-sized value, when in
19916 fact we are not. We need to adjust the offset of the
19917 storage location to reflect the actual value's bytes,
19918 else gdb will not be able to display it. */
19919 if (maybe_ne (offset, 0))
19920 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19921 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19922 }
19923
19924 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19925 and will have been substituted directly into all expressions that use it.
19926 C does not have such a concept, but C++ and other languages do. */
19927 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19928 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19929
19930 if (rtl)
19931 rtl = targetm.delegitimize_address (rtl);
19932
19933 /* If we don't look past the constant pool, we risk emitting a
19934 reference to a constant pool entry that isn't referenced from
19935 code, and thus is not emitted. */
19936 if (rtl)
19937 rtl = avoid_constant_pool_reference (rtl);
19938
19939 /* Try harder to get a rtl. If this symbol ends up not being emitted
19940 in the current CU, resolve_addr will remove the expression referencing
19941 it. */
19942 if (rtl == NULL_RTX
19943 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19944 && VAR_P (decl)
19945 && !DECL_EXTERNAL (decl)
19946 && TREE_STATIC (decl)
19947 && DECL_NAME (decl)
19948 && !DECL_HARD_REGISTER (decl)
19949 && DECL_MODE (decl) != VOIDmode)
19950 {
19951 rtl = make_decl_rtl_for_debug (decl);
19952 if (!MEM_P (rtl)
19953 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19954 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19955 rtl = NULL_RTX;
19956 }
19957
19958 return rtl;
19959 }
19960
19961 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19962 returned. If so, the decl for the COMMON block is returned, and the
19963 value is the offset into the common block for the symbol. */
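/* A hypothetical illustration: for Fortran source along the lines of

     COMMON /blk/ i, x

   the decl for X typically carries a DECL_VALUE_EXPR that is a COMPONENT_REF
   into the artificial variable representing /blk/; this function would then
   return that variable and set *VALUE to X's byte offset inside it.  */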
19964
19965 static tree
19966 fortran_common (tree decl, HOST_WIDE_INT *value)
19967 {
19968 tree val_expr, cvar;
19969 machine_mode mode;
19970 poly_int64 bitsize, bitpos;
19971 tree offset;
19972 HOST_WIDE_INT cbitpos;
19973 int unsignedp, reversep, volatilep = 0;
19974
19975 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19976 it does not have a value (the offset into the common area), or if it
19977 is thread local (as opposed to global) then it isn't common, and shouldn't
19978 be handled as such. */
19979 if (!VAR_P (decl)
19980 || !TREE_STATIC (decl)
19981 || !DECL_HAS_VALUE_EXPR_P (decl)
19982 || !is_fortran ())
19983 return NULL_TREE;
19984
19985 val_expr = DECL_VALUE_EXPR (decl);
19986 if (TREE_CODE (val_expr) != COMPONENT_REF)
19987 return NULL_TREE;
19988
19989 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19990 &unsignedp, &reversep, &volatilep);
19991
19992 if (cvar == NULL_TREE
19993 || !VAR_P (cvar)
19994 || DECL_ARTIFICIAL (cvar)
19995 || !TREE_PUBLIC (cvar)
19996 /* We don't expect to have to cope with variable offsets,
19997 since at present all static data must have a constant size. */
19998 || !bitpos.is_constant (&cbitpos))
19999 return NULL_TREE;
20000
20001 *value = 0;
20002 if (offset != NULL)
20003 {
20004 if (!tree_fits_shwi_p (offset))
20005 return NULL_TREE;
20006 *value = tree_to_shwi (offset);
20007 }
20008 if (cbitpos != 0)
20009 *value += cbitpos / BITS_PER_UNIT;
20010
20011 return cvar;
20012 }
20013
20014 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20015 data attribute for a variable or a parameter. We generate the
20016 DW_AT_const_value attribute only in those cases where the given variable
20017 or parameter does not have a true "location" either in memory or in a
20018 register. This can happen (for example) when a constant is passed as an
20019 actual argument in a call to an inline function. (It's possible that
20020 these things can crop up in other ways also.) Note that one type of
20021 constant value which can be passed into an inlined function is a constant
20022 pointer. This can happen for example if an actual argument in an inlined
20023 function call evaluates to a compile-time constant address.
20024
20025 CACHE_P is true if it is worth caching the location list for DECL,
20026 so that future calls can reuse it rather than regenerate it from scratch.
20027 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20028 since we will need to refer to them each time the function is inlined. */
20029
20030 static bool
20031 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20032 {
20033 rtx rtl;
20034 dw_loc_list_ref list;
20035 var_loc_list *loc_list;
20036 cached_dw_loc_list *cache;
20037
20038 if (early_dwarf)
20039 return false;
20040
20041 if (TREE_CODE (decl) == ERROR_MARK)
20042 return false;
20043
20044 if (get_AT (die, DW_AT_location)
20045 || get_AT (die, DW_AT_const_value))
20046 return true;
20047
20048 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20049 || TREE_CODE (decl) == RESULT_DECL);
20050
20051 /* Try to get some constant RTL for this decl, and use that as the value of
20052 the location. */
20053
20054 rtl = rtl_for_decl_location (decl);
20055 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20056 && add_const_value_attribute (die, rtl))
20057 return true;
20058
20059   /* See if we have a single element location list that is equivalent to
20060      a constant value.  In that case it is better to use add_const_value_attribute
20061      rather than expanding the constant value equivalent.  */
20062 loc_list = lookup_decl_loc (decl);
20063 if (loc_list
20064 && loc_list->first
20065 && loc_list->first->next == NULL
20066 && NOTE_P (loc_list->first->loc)
20067 && NOTE_VAR_LOCATION (loc_list->first->loc)
20068 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20069 {
20070 struct var_loc_node *node;
20071
20072 node = loc_list->first;
20073 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20074 if (GET_CODE (rtl) == EXPR_LIST)
20075 rtl = XEXP (rtl, 0);
20076 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20077 && add_const_value_attribute (die, rtl))
20078 return true;
20079 }
20080 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20081 list several times. See if we've already cached the contents. */
20082 list = NULL;
20083 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20084 cache_p = false;
20085 if (cache_p)
20086 {
20087 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20088 if (cache)
20089 list = cache->loc_list;
20090 }
20091 if (list == NULL)
20092 {
20093 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20094 NULL);
20095 /* It is usually worth caching this result if the decl is from
20096 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20097 if (cache_p && list && list->dw_loc_next)
20098 {
20099 cached_dw_loc_list **slot
20100 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20101 DECL_UID (decl),
20102 INSERT);
20103 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20104 cache->decl_id = DECL_UID (decl);
20105 cache->loc_list = list;
20106 *slot = cache;
20107 }
20108 }
20109 if (list)
20110 {
20111 add_AT_location_description (die, DW_AT_location, list);
20112 return true;
20113 }
20114 /* None of that worked, so it must not really have a location;
20115 try adding a constant value attribute from the DECL_INITIAL. */
20116 return tree_add_const_value_attribute_for_decl (die, decl);
20117 }
20118
20119 /* Helper function for tree_add_const_value_attribute. Natively encode
20120 initializer INIT into an array. Return true if successful. */
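/* A rough illustration (exact bytes depend on the target's endianness and
   layout rules): encoding the initializer of

     int v[3] = { 1, 2, 3 };

   into a 12-byte array lays the three ints out back to back in target byte
   order, and a RANGE_EXPR index such as [0 ... 2] = 7 is handled by copying
   the encoded element across the whole range.  */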
20121
20122 static bool
20123 native_encode_initializer (tree init, unsigned char *array, int size)
20124 {
20125 tree type;
20126
20127 if (init == NULL_TREE)
20128 return false;
20129
20130 STRIP_NOPS (init);
20131 switch (TREE_CODE (init))
20132 {
20133 case STRING_CST:
20134 type = TREE_TYPE (init);
20135 if (TREE_CODE (type) == ARRAY_TYPE)
20136 {
20137 tree enttype = TREE_TYPE (type);
20138 scalar_int_mode mode;
20139
20140 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20141 || GET_MODE_SIZE (mode) != 1)
20142 return false;
20143 if (int_size_in_bytes (type) != size)
20144 return false;
20145 if (size > TREE_STRING_LENGTH (init))
20146 {
20147 memcpy (array, TREE_STRING_POINTER (init),
20148 TREE_STRING_LENGTH (init));
20149 memset (array + TREE_STRING_LENGTH (init),
20150 '\0', size - TREE_STRING_LENGTH (init));
20151 }
20152 else
20153 memcpy (array, TREE_STRING_POINTER (init), size);
20154 return true;
20155 }
20156 return false;
20157 case CONSTRUCTOR:
20158 type = TREE_TYPE (init);
20159 if (int_size_in_bytes (type) != size)
20160 return false;
20161 if (TREE_CODE (type) == ARRAY_TYPE)
20162 {
20163 HOST_WIDE_INT min_index;
20164 unsigned HOST_WIDE_INT cnt;
20165 int curpos = 0, fieldsize;
20166 constructor_elt *ce;
20167
20168 if (TYPE_DOMAIN (type) == NULL_TREE
20169 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20170 return false;
20171
20172 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20173 if (fieldsize <= 0)
20174 return false;
20175
20176 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20177 memset (array, '\0', size);
20178 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20179 {
20180 tree val = ce->value;
20181 tree index = ce->index;
20182 int pos = curpos;
20183 if (index && TREE_CODE (index) == RANGE_EXPR)
20184 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20185 * fieldsize;
20186 else if (index)
20187 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20188
20189 if (val)
20190 {
20191 STRIP_NOPS (val);
20192 if (!native_encode_initializer (val, array + pos, fieldsize))
20193 return false;
20194 }
20195 curpos = pos + fieldsize;
20196 if (index && TREE_CODE (index) == RANGE_EXPR)
20197 {
20198 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20199 - tree_to_shwi (TREE_OPERAND (index, 0));
20200 while (count-- > 0)
20201 {
20202 if (val)
20203 memcpy (array + curpos, array + pos, fieldsize);
20204 curpos += fieldsize;
20205 }
20206 }
20207 gcc_assert (curpos <= size);
20208 }
20209 return true;
20210 }
20211 else if (TREE_CODE (type) == RECORD_TYPE
20212 || TREE_CODE (type) == UNION_TYPE)
20213 {
20214 tree field = NULL_TREE;
20215 unsigned HOST_WIDE_INT cnt;
20216 constructor_elt *ce;
20217
20218 if (int_size_in_bytes (type) != size)
20219 return false;
20220
20221 if (TREE_CODE (type) == RECORD_TYPE)
20222 field = TYPE_FIELDS (type);
20223
20224 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20225 {
20226 tree val = ce->value;
20227 int pos, fieldsize;
20228
20229 if (ce->index != 0)
20230 field = ce->index;
20231
20232 if (val)
20233 STRIP_NOPS (val);
20234
20235 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20236 return false;
20237
20238 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20239 && TYPE_DOMAIN (TREE_TYPE (field))
20240 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20241 return false;
20242 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20243 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20244 return false;
20245 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20246 pos = int_byte_position (field);
20247 gcc_assert (pos + fieldsize <= size);
20248 if (val && fieldsize != 0
20249 && !native_encode_initializer (val, array + pos, fieldsize))
20250 return false;
20251 }
20252 return true;
20253 }
20254 return false;
20255 case VIEW_CONVERT_EXPR:
20256 case NON_LVALUE_EXPR:
20257 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20258 default:
20259 return native_encode_expr (init, array, size) == size;
20260 }
20261 }
20262
20263 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20264 attribute is the const value T. */
20265
20266 static bool
20267 tree_add_const_value_attribute (dw_die_ref die, tree t)
20268 {
20269 tree init;
20270 tree type = TREE_TYPE (t);
20271 rtx rtl;
20272
20273 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20274 return false;
20275
20276 init = t;
20277 gcc_assert (!DECL_P (init));
20278
20279 if (TREE_CODE (init) == INTEGER_CST)
20280 {
20281 if (tree_fits_uhwi_p (init))
20282 {
20283 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20284 return true;
20285 }
20286 if (tree_fits_shwi_p (init))
20287 {
20288 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20289 return true;
20290 }
20291 }
20292 if (! early_dwarf)
20293 {
20294 rtl = rtl_for_decl_init (init, type);
20295 if (rtl)
20296 return add_const_value_attribute (die, rtl);
20297 }
20298 /* If the host and target are sane, try harder. */
20299 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20300 && initializer_constant_valid_p (init, type))
20301 {
20302 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20303 if (size > 0 && (int) size == size)
20304 {
20305 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20306
20307 if (native_encode_initializer (init, array, size))
20308 {
20309 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20310 return true;
20311 }
20312 ggc_free (array);
20313 }
20314 }
20315 return false;
20316 }
20317
20318 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20319 attribute is the const value of T, where T is an integral constant
20320 variable with static storage duration
20321 (so it can't be a PARM_DECL or a RESULT_DECL). */
20322
20323 static bool
20324 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20325 {
20326
20327 if (!decl
20328 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20329 || (VAR_P (decl) && !TREE_STATIC (decl)))
20330 return false;
20331
20332 if (TREE_READONLY (decl)
20333 && ! TREE_THIS_VOLATILE (decl)
20334 && DECL_INITIAL (decl))
20335 /* OK */;
20336 else
20337 return false;
20338
20339 /* Don't add DW_AT_const_value if abstract origin already has one. */
20340 if (get_AT (var_die, DW_AT_const_value))
20341 return false;
20342
20343 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20344 }
20345
20346 /* Convert the CFI instructions for the current function into a
20347    location list.  This is used for DW_AT_frame_base when we are targeting
20348 a dwarf2 consumer that does not support the dwarf3
20349 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20350 expressions. */
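/* For instance (a simplified, hypothetical shape of the result), a function
   whose CFA is sp + 4 at entry and fp + 8 after the prologue would yield a
   location list of roughly two entries:

     [func_begin, end_prologue)  frame base is <sp reg> + 4 + OFFSET
     [end_prologue, func_end)    frame base is <fp reg> + 8 + OFFSET

   with each entry's expression built by build_cfa_loc.  */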
20351
20352 static dw_loc_list_ref
20353 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20354 {
20355 int ix;
20356 dw_fde_ref fde;
20357 dw_loc_list_ref list, *list_tail;
20358 dw_cfi_ref cfi;
20359 dw_cfa_location last_cfa, next_cfa;
20360 const char *start_label, *last_label, *section;
20361 dw_cfa_location remember;
20362
20363 fde = cfun->fde;
20364 gcc_assert (fde != NULL);
20365
20366 section = secname_for_decl (current_function_decl);
20367 list_tail = &list;
20368 list = NULL;
20369
20370 memset (&next_cfa, 0, sizeof (next_cfa));
20371 next_cfa.reg = INVALID_REGNUM;
20372 remember = next_cfa;
20373
20374 start_label = fde->dw_fde_begin;
20375
20376 /* ??? Bald assumption that the CIE opcode list does not contain
20377 advance opcodes. */
20378 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20379 lookup_cfa_1 (cfi, &next_cfa, &remember);
20380
20381 last_cfa = next_cfa;
20382 last_label = start_label;
20383
20384 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20385 {
20386 /* If the first partition contained no CFI adjustments, the
20387 CIE opcodes apply to the whole first partition. */
20388 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20389 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20390       list_tail = &(*list_tail)->dw_loc_next;
20391 start_label = last_label = fde->dw_fde_second_begin;
20392 }
20393
20394 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20395 {
20396 switch (cfi->dw_cfi_opc)
20397 {
20398 case DW_CFA_set_loc:
20399 case DW_CFA_advance_loc1:
20400 case DW_CFA_advance_loc2:
20401 case DW_CFA_advance_loc4:
20402 if (!cfa_equal_p (&last_cfa, &next_cfa))
20403 {
20404 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20405 start_label, 0, last_label, 0, section);
20406
20407 list_tail = &(*list_tail)->dw_loc_next;
20408 last_cfa = next_cfa;
20409 start_label = last_label;
20410 }
20411 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20412 break;
20413
20414 case DW_CFA_advance_loc:
20415 /* The encoding is complex enough that we should never emit this. */
20416 gcc_unreachable ();
20417
20418 default:
20419 lookup_cfa_1 (cfi, &next_cfa, &remember);
20420 break;
20421 }
20422 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20423 {
20424 if (!cfa_equal_p (&last_cfa, &next_cfa))
20425 {
20426 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20427 start_label, 0, last_label, 0, section);
20428
20429 list_tail = &(*list_tail)->dw_loc_next;
20430 last_cfa = next_cfa;
20431 start_label = last_label;
20432 }
20433 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20434 start_label, 0, fde->dw_fde_end, 0, section);
20435 list_tail = &(*list_tail)->dw_loc_next;
20436 start_label = last_label = fde->dw_fde_second_begin;
20437 }
20438 }
20439
20440 if (!cfa_equal_p (&last_cfa, &next_cfa))
20441 {
20442 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20443 start_label, 0, last_label, 0, section);
20444 list_tail = &(*list_tail)->dw_loc_next;
20445 start_label = last_label;
20446 }
20447
20448 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20449 start_label, 0,
20450 fde->dw_fde_second_begin
20451 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20452 section);
20453
20454 maybe_gen_llsym (list);
20455
20456 return list;
20457 }
20458
20459 /* Compute a displacement from the "steady-state frame pointer" to the
20460 frame base (often the same as the CFA), and store it in
20461 frame_pointer_fb_offset. OFFSET is added to the displacement
20462 before the latter is negated. */
20463
20464 static void
20465 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20466 {
20467 rtx reg, elim;
20468
20469 #ifdef FRAME_POINTER_CFA_OFFSET
20470 reg = frame_pointer_rtx;
20471 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20472 #else
20473 reg = arg_pointer_rtx;
20474 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20475 #endif
20476
20477 elim = (ira_use_lra_p
20478 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20479 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20480 elim = strip_offset_and_add (elim, &offset);
20481
20482 frame_pointer_fb_offset = -offset;
20483
20484 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20485    in which to eliminate.  This is because its stack pointer isn't
20486 directly accessible as a register within the ISA. To work around
20487 this, assume that while we cannot provide a proper value for
20488 frame_pointer_fb_offset, we won't need one either. */
20489 frame_pointer_fb_offset_valid
20490 = ((SUPPORTS_STACK_ALIGNMENT
20491 && (elim == hard_frame_pointer_rtx
20492 || elim == stack_pointer_rtx))
20493 || elim == (frame_pointer_needed
20494 ? hard_frame_pointer_rtx
20495 : stack_pointer_rtx));
20496 }
20497
20498 /* Generate a DW_AT_name attribute given some string value to be included as
20499 the value of the attribute. */
20500
20501 static void
20502 add_name_attribute (dw_die_ref die, const char *name_string)
20503 {
20504 if (name_string != NULL && *name_string != 0)
20505 {
20506 if (demangle_name_func)
20507 name_string = (*demangle_name_func) (name_string);
20508
20509 add_AT_string (die, DW_AT_name, name_string);
20510 }
20511 }
20512
20513 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20514 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20515 of TYPE accordingly.
20516
20517 ??? This is a temporary measure until after we're able to generate
20518 regular DWARF for the complex Ada type system. */
20519
20520 static void
20521 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20522 dw_die_ref context_die)
20523 {
20524 tree dtype;
20525 dw_die_ref dtype_die;
20526
20527 if (!lang_hooks.types.descriptive_type)
20528 return;
20529
20530 dtype = lang_hooks.types.descriptive_type (type);
20531 if (!dtype)
20532 return;
20533
20534 dtype_die = lookup_type_die (dtype);
20535 if (!dtype_die)
20536 {
20537 gen_type_die (dtype, context_die);
20538 dtype_die = lookup_type_die (dtype);
20539 gcc_assert (dtype_die);
20540 }
20541
20542 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20543 }
20544
20545 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20546
20547 static const char *
20548 comp_dir_string (void)
20549 {
20550 const char *wd;
20551 char *wd1;
20552 static const char *cached_wd = NULL;
20553
20554 if (cached_wd != NULL)
20555 return cached_wd;
20556
20557 wd = get_src_pwd ();
20558 if (wd == NULL)
20559 return NULL;
20560
20561 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20562 {
20563 int wdlen;
20564
20565 wdlen = strlen (wd);
20566 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20567 strcpy (wd1, wd);
20568 wd1 [wdlen] = DIR_SEPARATOR;
20569 wd1 [wdlen + 1] = 0;
20570 wd = wd1;
20571 }
20572
20573 cached_wd = remap_debug_filename (wd);
20574 return cached_wd;
20575 }
20576
20577 /* Generate a DW_AT_comp_dir attribute for DIE. */
20578
20579 static void
20580 add_comp_dir_attribute (dw_die_ref die)
20581 {
20582 const char * wd = comp_dir_string ();
20583 if (wd != NULL)
20584 add_AT_string (die, DW_AT_comp_dir, wd);
20585 }
20586
20587 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20588 pointer computation, ...), output a representation for that bound according
20589 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20590 loc_list_from_tree for the meaning of CONTEXT. */
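/* Informally, the strategy below is: emit a plain constant when VALUE is an
   INTEGER_CST and dw_scalar_form_constant is permitted; emit a DIE reference
   when VALUE resolves to a FIELD_DECL or VAR/PARM/RESULT_DECL that already
   has a DIE; otherwise fall back to a DWARF expression, or to an artificial
   DW_TAG_variable holding a location list, when dw_scalar_form_exprloc is
   permitted.  */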
20591
20592 static void
20593 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20594 int forms, struct loc_descr_context *context)
20595 {
20596 dw_die_ref context_die, decl_die;
20597 dw_loc_list_ref list;
20598 bool strip_conversions = true;
20599 bool placeholder_seen = false;
20600
20601 while (strip_conversions)
20602 switch (TREE_CODE (value))
20603 {
20604 case ERROR_MARK:
20605 case SAVE_EXPR:
20606 return;
20607
20608 CASE_CONVERT:
20609 case VIEW_CONVERT_EXPR:
20610 value = TREE_OPERAND (value, 0);
20611 break;
20612
20613 default:
20614 strip_conversions = false;
20615 break;
20616 }
20617
20618 /* If possible and permitted, output the attribute as a constant. */
20619 if ((forms & dw_scalar_form_constant) != 0
20620 && TREE_CODE (value) == INTEGER_CST)
20621 {
20622 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20623
20624 /* If HOST_WIDE_INT is big enough then represent the bound as
20625 a constant value. We need to choose a form based on
20626 whether the type is signed or unsigned. We cannot just
20627 call add_AT_unsigned if the value itself is positive
20628 (add_AT_unsigned might add the unsigned value encoded as
20629 	 DW_FORM_data[1248]).  Some DWARF consumers will look up the
20630 bounds type and then sign extend any unsigned values found
20631 for signed types. This is needed only for
20632 DW_AT_{lower,upper}_bound, since for most other attributes,
20633 consumers will treat DW_FORM_data[1248] as unsigned values,
20634 regardless of the underlying type. */
20635 if (prec <= HOST_BITS_PER_WIDE_INT
20636 || tree_fits_uhwi_p (value))
20637 {
20638 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20639 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20640 else
20641 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20642 }
20643 else
20644 /* Otherwise represent the bound as an unsigned value with
20645 the precision of its type. The precision and signedness
20646 of the type will be necessary to re-interpret it
20647 unambiguously. */
20648 add_AT_wide (die, attr, wi::to_wide (value));
20649 return;
20650 }
20651
20652 /* Otherwise, if it's possible and permitted too, output a reference to
20653 another DIE. */
20654 if ((forms & dw_scalar_form_reference) != 0)
20655 {
20656 tree decl = NULL_TREE;
20657
20658 /* Some type attributes reference an outer type. For instance, the upper
20659 bound of an array may reference an embedding record (this happens in
20660 Ada). */
20661 if (TREE_CODE (value) == COMPONENT_REF
20662 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20663 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20664 decl = TREE_OPERAND (value, 1);
20665
20666 else if (VAR_P (value)
20667 || TREE_CODE (value) == PARM_DECL
20668 || TREE_CODE (value) == RESULT_DECL)
20669 decl = value;
20670
20671 if (decl != NULL_TREE)
20672 {
20673 dw_die_ref decl_die = lookup_decl_die (decl);
20674
20675 /* ??? Can this happen, or should the variable have been bound
20676 first? Probably it can, since I imagine that we try to create
20677 the types of parameters in the order in which they exist in
20678 the list, and won't have created a forward reference to a
20679 later parameter. */
20680 if (decl_die != NULL)
20681 {
20682 add_AT_die_ref (die, attr, decl_die);
20683 return;
20684 }
20685 }
20686 }
20687
20688 /* Last chance: try to create a stack operation procedure to evaluate the
20689 value. Do nothing if even that is not possible or permitted. */
20690 if ((forms & dw_scalar_form_exprloc) == 0)
20691 return;
20692
20693 list = loc_list_from_tree (value, 2, context);
20694 if (context && context->placeholder_arg)
20695 {
20696 placeholder_seen = context->placeholder_seen;
20697 context->placeholder_seen = false;
20698 }
20699 if (list == NULL || single_element_loc_list_p (list))
20700 {
20701       /* If this attribute is neither a reference nor a constant, it is
20702 	 a DWARF expression rather than a location description.  For that,
20703 	 loc_list_from_tree (value, 0, &context) is needed.  */
20704 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20705 if (list2 && single_element_loc_list_p (list2))
20706 {
20707 if (placeholder_seen)
20708 {
20709 struct dwarf_procedure_info dpi;
20710 dpi.fndecl = NULL_TREE;
20711 dpi.args_count = 1;
20712 if (!resolve_args_picking (list2->expr, 1, &dpi))
20713 return;
20714 }
20715 add_AT_loc (die, attr, list2->expr);
20716 return;
20717 }
20718 }
20719
20720 /* If that failed to give a single element location list, fall back to
20721 outputting this as a reference... still if permitted. */
20722 if (list == NULL
20723 || (forms & dw_scalar_form_reference) == 0
20724 || placeholder_seen)
20725 return;
20726
20727 if (current_function_decl == 0)
20728 context_die = comp_unit_die ();
20729 else
20730 context_die = lookup_decl_die (current_function_decl);
20731
20732 decl_die = new_die (DW_TAG_variable, context_die, value);
20733 add_AT_flag (decl_die, DW_AT_artificial, 1);
20734 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20735 context_die);
20736 add_AT_location_description (decl_die, DW_AT_location, list);
20737 add_AT_die_ref (die, attr, decl_die);
20738 }
20739
20740 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20741 default. */
20742
20743 static int
20744 lower_bound_default (void)
20745 {
20746 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20747 {
20748 case DW_LANG_C:
20749 case DW_LANG_C89:
20750 case DW_LANG_C99:
20751 case DW_LANG_C11:
20752 case DW_LANG_C_plus_plus:
20753 case DW_LANG_C_plus_plus_11:
20754 case DW_LANG_C_plus_plus_14:
20755 case DW_LANG_ObjC:
20756 case DW_LANG_ObjC_plus_plus:
20757 return 0;
20758 case DW_LANG_Fortran77:
20759 case DW_LANG_Fortran90:
20760 case DW_LANG_Fortran95:
20761 case DW_LANG_Fortran03:
20762 case DW_LANG_Fortran08:
20763 return 1;
20764 case DW_LANG_UPC:
20765 case DW_LANG_D:
20766 case DW_LANG_Python:
20767 return dwarf_version >= 4 ? 0 : -1;
20768 case DW_LANG_Ada95:
20769 case DW_LANG_Ada83:
20770 case DW_LANG_Cobol74:
20771 case DW_LANG_Cobol85:
20772 case DW_LANG_Modula2:
20773 case DW_LANG_PLI:
20774 return dwarf_version >= 4 ? 1 : -1;
20775 default:
20776 return -1;
20777 }
20778 }
20779
20780 /* Given a tree node describing an array bound (either lower or upper) output
20781 a representation for that bound. */
20782
20783 static void
20784 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20785 tree bound, struct loc_descr_context *context)
20786 {
20787 int dflt;
20788
20789 while (1)
20790 switch (TREE_CODE (bound))
20791 {
20792 /* Strip all conversions. */
20793 CASE_CONVERT:
20794 case VIEW_CONVERT_EXPR:
20795 bound = TREE_OPERAND (bound, 0);
20796 break;
20797
20798 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20799 are even omitted when they are the default. */
20800 case INTEGER_CST:
20801 /* If the value for this bound is the default one, we can even omit the
20802 attribute. */
20803 if (bound_attr == DW_AT_lower_bound
20804 && tree_fits_shwi_p (bound)
20805 && (dflt = lower_bound_default ()) != -1
20806 && tree_to_shwi (bound) == dflt)
20807 return;
20808
20809 /* FALLTHRU */
20810
20811 default:
20812       /* Because of the complex interactions there can be with other GNAT
20813 	 encodings, GDB isn't ready yet to handle proper DWARF descriptions
20814 	 for self-referential subrange bounds: let GNAT encodings do the
20815 	 magic in such a case.  */
20816 if (is_ada ()
20817 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20818 && contains_placeholder_p (bound))
20819 return;
20820
20821 add_scalar_info (subrange_die, bound_attr, bound,
20822 dw_scalar_form_constant
20823 | dw_scalar_form_exprloc
20824 | dw_scalar_form_reference,
20825 context);
20826 return;
20827 }
20828 }
20829
20830 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20831 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20832 Note that the block of subscript information for an array type also
20833 includes information about the element type of the given array type.
20834
20835 This function reuses previously set type and bound information if
20836 available. */
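/* For example (illustrative only), with COLLAPSE_P set a C array such as

     int a[3][5];

   is described by a single DW_TAG_array_type whose two DW_TAG_subrange_type
   children carry DW_AT_upper_bound 2 and 4 respectively; the lower bound of
   0 is the C default and is therefore omitted.  */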
20837
20838 static void
20839 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20840 {
20841 unsigned dimension_number;
20842 tree lower, upper;
20843 dw_die_ref child = type_die->die_child;
20844
20845 for (dimension_number = 0;
20846 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20847 type = TREE_TYPE (type), dimension_number++)
20848 {
20849 tree domain = TYPE_DOMAIN (type);
20850
20851 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20852 break;
20853
20854 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20855 and (in GNU C only) variable bounds. Handle all three forms
20856 here. */
20857
20858 /* Find and reuse a previously generated DW_TAG_subrange_type if
20859 available.
20860
20861 For multi-dimensional arrays, as we iterate through the
20862 various dimensions in the enclosing for loop above, we also
20863 iterate through the DIE children and pick up each
20864 DW_TAG_subrange_type previously generated (if available).
20865 Each child DW_TAG_subrange_type DIE describes the range of
20866 the current dimension. At this point we should have as many
20867 DW_TAG_subrange_type's as we have dimensions in the
20868 array. */
20869 dw_die_ref subrange_die = NULL;
20870 if (child)
20871 while (1)
20872 {
20873 child = child->die_sib;
20874 if (child->die_tag == DW_TAG_subrange_type)
20875 subrange_die = child;
20876 if (child == type_die->die_child)
20877 {
20878 /* If we wrapped around, stop looking next time. */
20879 child = NULL;
20880 break;
20881 }
20882 if (child->die_tag == DW_TAG_subrange_type)
20883 break;
20884 }
20885 if (!subrange_die)
20886 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20887
20888 if (domain)
20889 {
20890 /* We have an array type with specified bounds. */
20891 lower = TYPE_MIN_VALUE (domain);
20892 upper = TYPE_MAX_VALUE (domain);
20893
20894 /* Define the index type. */
20895 if (TREE_TYPE (domain)
20896 && !get_AT (subrange_die, DW_AT_type))
20897 {
20898 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20899 TREE_TYPE field. We can't emit debug info for this
20900 because it is an unnamed integral type. */
20901 if (TREE_CODE (domain) == INTEGER_TYPE
20902 && TYPE_NAME (domain) == NULL_TREE
20903 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20904 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20905 ;
20906 else
20907 add_type_attribute (subrange_die, TREE_TYPE (domain),
20908 TYPE_UNQUALIFIED, false, type_die);
20909 }
20910
20911 /* ??? If upper is NULL, the array has unspecified length,
20912 but it does have a lower bound. This happens with Fortran
20913 dimension arr(N:*)
20914 Since the debugger is definitely going to need to know N
20915 to produce useful results, go ahead and output the lower
20916 bound solo, and hope the debugger can cope. */
20917
20918 if (!get_AT (subrange_die, DW_AT_lower_bound))
20919 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20920 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20921 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20922 }
20923
20924 /* Otherwise we have an array type with an unspecified length. The
20925 DWARF-2 spec does not say how to handle this; let's just leave out the
20926 bounds. */
20927 }
20928 }
20929
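/* For example (an illustrative sketch for a C translation unit, where
   nested array types are collapsed): the declaration

     int m[3][5];

   yields a single DW_TAG_array_type of element type int with two
   DW_TAG_subrange_type children carrying DW_AT_upper_bound 2 and 4
   respectively; the default C lower bound of 0 is omitted (see
   lower_bound_default above).  For Ada, COLLAPSE_P is false and each
   dimension keeps its own array type.  */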
20930 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20931
20932 static void
20933 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20934 {
20935 dw_die_ref decl_die;
20936 HOST_WIDE_INT size;
20937 dw_loc_descr_ref size_expr = NULL;
20938
20939 switch (TREE_CODE (tree_node))
20940 {
20941 case ERROR_MARK:
20942 size = 0;
20943 break;
20944 case ENUMERAL_TYPE:
20945 case RECORD_TYPE:
20946 case UNION_TYPE:
20947 case QUAL_UNION_TYPE:
20948 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20949 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20950 {
20951 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20952 return;
20953 }
20954 size_expr = type_byte_size (tree_node, &size);
20955 break;
20956 case FIELD_DECL:
20957 /* For a data member of a struct or union, the DW_AT_byte_size is
20958 generally given as the number of bytes normally allocated for an
20959 object of the *declared* type of the member itself. This is true
20960 even for bit-fields. */
20961 size = int_size_in_bytes (field_type (tree_node));
20962 break;
20963 default:
20964 gcc_unreachable ();
20965 }
20966
20967 /* Support for dynamically-sized objects was introduced by DWARFv3.
20968 At the moment, GDB does not handle variable byte sizes very well,
20969 though. */
20970 if ((dwarf_version >= 3 || !dwarf_strict)
20971 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20972 && size_expr != NULL)
20973 add_AT_loc (die, DW_AT_byte_size, size_expr);
20974
20975 /* Note that `size' might be -1 when we get to this point. If it is, that
20976 indicates that the byte size of the entity in question is variable and
20977 that we could not generate a DWARF expression that computes it. */
20978 if (size >= 0)
20979 add_AT_unsigned (die, DW_AT_byte_size, size);
20980 }
20981
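/* For example (illustrative and target-dependent): on a target where int
   is 4 bytes with 4-byte alignment,

     struct s { char c; int i; };

   gets DW_AT_byte_size 8 (one byte for 'c', three bytes of padding, four
   for 'i').  When the size is not a compile-time constant, type_byte_size
   may instead yield a DWARF expression, emitted above when the DWARF
   version and GNAT encoding settings allow it.  */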
20982 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20983 alignment. */
20984
20985 static void
20986 add_alignment_attribute (dw_die_ref die, tree tree_node)
20987 {
20988 if (dwarf_version < 5 && dwarf_strict)
20989 return;
20990
20991 unsigned align;
20992
20993 if (DECL_P (tree_node))
20994 {
20995 if (!DECL_USER_ALIGN (tree_node))
20996 return;
20997
20998 align = DECL_ALIGN_UNIT (tree_node);
20999 }
21000 else if (TYPE_P (tree_node))
21001 {
21002 if (!TYPE_USER_ALIGN (tree_node))
21003 return;
21004
21005 align = TYPE_ALIGN_UNIT (tree_node);
21006 }
21007 else
21008 gcc_unreachable ();
21009
21010 add_AT_unsigned (die, DW_AT_alignment, align);
21011 }
21012
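/* For example (illustrative only): only user-specified alignment is
   described here (and only for DWARF 5, or when not being strict), so

     struct s { char buf[32]; } __attribute__ ((aligned (16)));

   gets DW_AT_alignment 16, whereas the same struct without the attribute
   gets no DW_AT_alignment at all even though it still has a natural
   default alignment.  */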
21013 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21014 which specifies the distance in bits from the highest order bit of the
21015 "containing object" for the bit-field to the highest order bit of the
21016 bit-field itself.
21017
21018 For any given bit-field, the "containing object" is a hypothetical object
21019 (of some integral or enum type) within which the given bit-field lives. The
21020 type of this hypothetical "containing object" is always the same as the
21021 declared type of the individual bit-field itself. The determination of the
21022 exact location of the "containing object" for a bit-field is rather
21023 complicated. It's handled by the `field_byte_offset' function (above).
21024
21025 CTX is required: see the comment for VLR_CONTEXT.
21026
21027 Note that it is the size (in bytes) of the hypothetical "containing object"
21028 which will be given in the DW_AT_byte_size attribute for this bit-field.
21029 (See `add_byte_size_attribute' above). */
21030
21031 static inline void
21032 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21033 {
21034 HOST_WIDE_INT object_offset_in_bytes;
21035 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21036 HOST_WIDE_INT bitpos_int;
21037 HOST_WIDE_INT highest_order_object_bit_offset;
21038 HOST_WIDE_INT highest_order_field_bit_offset;
21039 HOST_WIDE_INT bit_offset;
21040
21041 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21042
21043 /* Must be a field and a bit field. */
21044 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21045
21046 /* We can't yet handle bit-fields whose offsets are variable, so if we
21047 encounter such things, just return without generating any attribute
21048 whatsoever. Likewise for variable or too large size. */
21049 if (! tree_fits_shwi_p (bit_position (decl))
21050 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21051 return;
21052
21053 bitpos_int = int_bit_position (decl);
21054
21055 /* Note that the bit offset is always the distance (in bits) from the
21056 highest-order bit of the "containing object" to the highest-order bit of
21057 the bit-field itself. Since the "high-order end" of any object or field
21058 is different on big-endian and little-endian machines, the computation
21059 below must take account of these differences. */
21060 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21061 highest_order_field_bit_offset = bitpos_int;
21062
21063 if (! BYTES_BIG_ENDIAN)
21064 {
21065 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21066 highest_order_object_bit_offset +=
21067 simple_type_size_in_bits (original_type);
21068 }
21069
21070 bit_offset
21071 = (! BYTES_BIG_ENDIAN
21072 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21073 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21074
21075 if (bit_offset < 0)
21076 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21077 else
21078 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21079 }
21080
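/* Worked example (illustrative, assuming the containing object is a
   32-bit int at byte offset 0):

     struct s { unsigned a : 3; unsigned b : 5; };

   For 'b', int_bit_position is 3 and DECL_SIZE is 5.  On a little-endian
   target the code above computes bit_offset = (0 + 32) - (3 + 5) = 24,
   the distance from the most significant bit of the 32-bit object to the
   most significant bit of the field, so 'b' gets DW_AT_bit_offset 24
   (and DW_AT_bit_size 5 from the function below).  On a big-endian
   target the same field gets bit_offset = 3 - 0 = 3.  */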
21081 /* For a FIELD_DECL node which represents a bit field, output an attribute
21082 which specifies the length in bits of the given field. */
21083
21084 static inline void
21085 add_bit_size_attribute (dw_die_ref die, tree decl)
21086 {
21087 /* Must be a field and a bit field. */
21088 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21089 && DECL_BIT_FIELD_TYPE (decl));
21090
21091 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21092 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21093 }
21094
21095 /* If the compiled language is ANSI C, then add a 'prototyped'
21096 attribute when argument types are given for the parameters of a function. */
21097
21098 static inline void
21099 add_prototyped_attribute (dw_die_ref die, tree func_type)
21100 {
21101 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21102 {
21103 case DW_LANG_C:
21104 case DW_LANG_C89:
21105 case DW_LANG_C99:
21106 case DW_LANG_C11:
21107 case DW_LANG_ObjC:
21108 if (prototype_p (func_type))
21109 add_AT_flag (die, DW_AT_prototyped, 1);
21110 break;
21111 default:
21112 break;
21113 }
21114 }
21115
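/* For example (illustrative only): in C, "int f (void);" is a prototype
   and gets DW_AT_prototyped, while the old-style declaration "int g ();"
   is not and gets no flag.  C++ is not listed in the switch above; its
   functions are always prototyped, so the attribute would carry no
   information there.  */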
21116 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21117 by looking in the type declaration, the object declaration equate table or
21118 the block mapping. */
21119
21120 static inline dw_die_ref
21121 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21122 {
21123 dw_die_ref origin_die = NULL;
21124
21125 if (DECL_P (origin))
21126 {
21127 dw_die_ref c;
21128 origin_die = lookup_decl_die (origin);
21129 /* "Unwrap" the decl's DIE which we put in the imported unit context.
21130 We are looking for the abstract copy here. */
21131 if (in_lto_p
21132 && origin_die
21133 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21134 /* ??? Identify this better. */
21135 && c->with_offset)
21136 origin_die = c;
21137 }
21138 else if (TYPE_P (origin))
21139 origin_die = lookup_type_die (origin);
21140 else if (TREE_CODE (origin) == BLOCK)
21141 origin_die = BLOCK_DIE (origin);
21142
21143 /* XXX: Functions that are never lowered don't always have correct block
21144 trees (in the case of Java they simply have no block tree, as in some other
21145 languages). For these functions, there is nothing we can really do to
21146 output correct debug info for inlined functions in all cases. Rather
21147 than die, we'll just produce deficient debug info now, in that we will
21148 have variables without a proper abstract origin. In the future, when all
21149 functions are lowered, we should re-add a gcc_assert (origin_die)
21150 here. */
21151
21152 if (origin_die)
21153 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21154 return origin_die;
21155 }
21156
21157 /* We do not currently support the pure_virtual attribute. */
21158
21159 static inline void
21160 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21161 {
21162 if (DECL_VINDEX (func_decl))
21163 {
21164 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21165
21166 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21167 add_AT_loc (die, DW_AT_vtable_elem_location,
21168 new_loc_descr (DW_OP_constu,
21169 tree_to_shwi (DECL_VINDEX (func_decl)),
21170 0));
21171
21172 /* GNU extension: Record what type this method came from originally. */
21173 if (debug_info_level > DINFO_LEVEL_TERSE
21174 && DECL_CONTEXT (func_decl))
21175 add_AT_die_ref (die, DW_AT_containing_type,
21176 lookup_type_die (DECL_CONTEXT (func_decl)));
21177 }
21178 }
21179 \f
21180 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21181 given decl. This used to be a vendor extension until DWARF 4
21182 standardized it. */
21183
21184 static void
21185 add_linkage_attr (dw_die_ref die, tree decl)
21186 {
21187 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21188
21189 /* Mimic what assemble_name_raw does with a leading '*'. */
21190 if (name[0] == '*')
21191 name = &name[1];
21192
21193 if (dwarf_version >= 4)
21194 add_AT_string (die, DW_AT_linkage_name, name);
21195 else
21196 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21197 }
21198
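/* For example (illustrative only): for the C++ function ns::f (int) the
   DW_AT_name attribute is just "f", while this routine adds the mangled
   assembler name, e.g. DW_AT_linkage_name "_ZN2ns1fEi" (spelled
   DW_AT_MIPS_linkage_name for DWARF versions before 4).  */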
21199 /* Add source coordinate attributes for the given decl. */
21200
21201 static void
21202 add_src_coords_attributes (dw_die_ref die, tree decl)
21203 {
21204 expanded_location s;
21205
21206 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21207 return;
21208 s = expand_location (DECL_SOURCE_LOCATION (decl));
21209 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21210 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21211 if (debug_column_info && s.column)
21212 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21213 }
21214
21215 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21216
21217 static void
21218 add_linkage_name_raw (dw_die_ref die, tree decl)
21219 {
21220 /* Defer until we have an assembler name set. */
21221 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21222 {
21223 limbo_die_node *asm_name;
21224
21225 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21226 asm_name->die = die;
21227 asm_name->created_for = decl;
21228 asm_name->next = deferred_asm_name;
21229 deferred_asm_name = asm_name;
21230 }
21231 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21232 add_linkage_attr (die, decl);
21233 }
21234
21235 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21236
21237 static void
21238 add_linkage_name (dw_die_ref die, tree decl)
21239 {
21240 if (debug_info_level > DINFO_LEVEL_NONE
21241 && VAR_OR_FUNCTION_DECL_P (decl)
21242 && TREE_PUBLIC (decl)
21243 && !(VAR_P (decl) && DECL_REGISTER (decl))
21244 && die->die_tag != DW_TAG_member)
21245 add_linkage_name_raw (die, decl);
21246 }
21247
21248 /* Add a DW_AT_name attribute and source coordinate attribute for the
21249 given decl, but only if it actually has a name. */
21250
21251 static void
21252 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21253 bool no_linkage_name)
21254 {
21255 tree decl_name;
21256
21257 decl_name = DECL_NAME (decl);
21258 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21259 {
21260 const char *name = dwarf2_name (decl, 0);
21261 if (name)
21262 add_name_attribute (die, name);
21263 if (! DECL_ARTIFICIAL (decl))
21264 add_src_coords_attributes (die, decl);
21265
21266 if (!no_linkage_name)
21267 add_linkage_name (die, decl);
21268 }
21269
21270 #ifdef VMS_DEBUGGING_INFO
21271 /* Get the function's name, as described by its RTL. This may be different
21272 from the DECL_NAME name used in the source file. */
21273 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21274 {
21275 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21276 XEXP (DECL_RTL (decl), 0), false);
21277 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21278 }
21279 #endif /* VMS_DEBUGGING_INFO */
21280 }
21281
21282 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21283
21284 static void
21285 add_discr_value (dw_die_ref die, dw_discr_value *value)
21286 {
21287 dw_attr_node attr;
21288
21289 attr.dw_attr = DW_AT_discr_value;
21290 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21291 attr.dw_attr_val.val_entry = NULL;
21292 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21293 if (value->pos)
21294 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21295 else
21296 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21297 add_dwarf_attr (die, &attr);
21298 }
21299
21300 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21301
21302 static void
21303 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21304 {
21305 dw_attr_node attr;
21306
21307 attr.dw_attr = DW_AT_discr_list;
21308 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21309 attr.dw_attr_val.val_entry = NULL;
21310 attr.dw_attr_val.v.val_discr_list = discr_list;
21311 add_dwarf_attr (die, &attr);
21312 }
21313
21314 static inline dw_discr_list_ref
21315 AT_discr_list (dw_attr_node *attr)
21316 {
21317 return attr->dw_attr_val.v.val_discr_list;
21318 }
21319
21320 #ifdef VMS_DEBUGGING_INFO
21321 /* Output the debug main pointer die for VMS */
21322
21323 void
21324 dwarf2out_vms_debug_main_pointer (void)
21325 {
21326 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21327 dw_die_ref die;
21328
21329 /* Allocate the VMS debug main subprogram die. */
21330 die = new_die_raw (DW_TAG_subprogram);
21331 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21332 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21333 current_function_funcdef_no);
21334 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21335
21336 /* Make it the first child of comp_unit_die (). */
21337 die->die_parent = comp_unit_die ();
21338 if (comp_unit_die ()->die_child)
21339 {
21340 die->die_sib = comp_unit_die ()->die_child->die_sib;
21341 comp_unit_die ()->die_child->die_sib = die;
21342 }
21343 else
21344 {
21345 die->die_sib = die;
21346 comp_unit_die ()->die_child = die;
21347 }
21348 }
21349 #endif /* VMS_DEBUGGING_INFO */
21350
21351 /* Push a new declaration scope. */
21352
21353 static void
21354 push_decl_scope (tree scope)
21355 {
21356 vec_safe_push (decl_scope_table, scope);
21357 }
21358
21359 /* Pop a declaration scope. */
21360
21361 static inline void
21362 pop_decl_scope (void)
21363 {
21364 decl_scope_table->pop ();
21365 }
21366
21367 /* walk_tree helper function for uses_local_type, below. */
21368
21369 static tree
21370 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21371 {
21372 if (!TYPE_P (*tp))
21373 *walk_subtrees = 0;
21374 else
21375 {
21376 tree name = TYPE_NAME (*tp);
21377 if (name && DECL_P (name) && decl_function_context (name))
21378 return *tp;
21379 }
21380 return NULL_TREE;
21381 }
21382
21383 /* If TYPE involves a function-local type (including a local typedef to a
21384 non-local type), returns that type; otherwise returns NULL_TREE. */
21385
21386 static tree
21387 uses_local_type (tree type)
21388 {
21389 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21390 return used;
21391 }
21392
21393 /* Return the DIE for the scope that immediately contains this type.
21394 Non-named types that do not involve a function-local type get global
21395 scope. Named types nested in namespaces or other types get their
21396 containing scope. All other types (i.e. function-local named types) get
21397 the current active scope. */
21398
21399 static dw_die_ref
21400 scope_die_for (tree t, dw_die_ref context_die)
21401 {
21402 dw_die_ref scope_die = NULL;
21403 tree containing_scope;
21404
21405 /* Non-types always go in the current scope. */
21406 gcc_assert (TYPE_P (t));
21407
21408 /* Use the scope of the typedef, rather than the scope of the type
21409 it refers to. */
21410 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21411 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21412 else
21413 containing_scope = TYPE_CONTEXT (t);
21414
21415 /* Use the containing namespace if there is one. */
21416 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21417 {
21418 if (context_die == lookup_decl_die (containing_scope))
21419 /* OK */;
21420 else if (debug_info_level > DINFO_LEVEL_TERSE)
21421 context_die = get_context_die (containing_scope);
21422 else
21423 containing_scope = NULL_TREE;
21424 }
21425
21426 /* Ignore function type "scopes" from the C frontend. They mean that
21427 a tagged type is local to a parmlist of a function declarator, but
21428 that isn't useful to DWARF. */
21429 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21430 containing_scope = NULL_TREE;
21431
21432 if (SCOPE_FILE_SCOPE_P (containing_scope))
21433 {
21434 /* If T uses a local type keep it local as well, to avoid references
21435 to function-local DIEs from outside the function. */
21436 if (current_function_decl && uses_local_type (t))
21437 scope_die = context_die;
21438 else
21439 scope_die = comp_unit_die ();
21440 }
21441 else if (TYPE_P (containing_scope))
21442 {
21443 /* For types, we can just look up the appropriate DIE. */
21444 if (debug_info_level > DINFO_LEVEL_TERSE)
21445 scope_die = get_context_die (containing_scope);
21446 else
21447 {
21448 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21449 if (scope_die == NULL)
21450 scope_die = comp_unit_die ();
21451 }
21452 }
21453 else
21454 scope_die = context_die;
21455
21456 return scope_die;
21457 }
21458
21459 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21460
21461 static inline int
21462 local_scope_p (dw_die_ref context_die)
21463 {
21464 for (; context_die; context_die = context_die->die_parent)
21465 if (context_die->die_tag == DW_TAG_inlined_subroutine
21466 || context_die->die_tag == DW_TAG_subprogram)
21467 return 1;
21468
21469 return 0;
21470 }
21471
21472 /* Returns nonzero if CONTEXT_DIE is a class. */
21473
21474 static inline int
21475 class_scope_p (dw_die_ref context_die)
21476 {
21477 return (context_die
21478 && (context_die->die_tag == DW_TAG_structure_type
21479 || context_die->die_tag == DW_TAG_class_type
21480 || context_die->die_tag == DW_TAG_interface_type
21481 || context_die->die_tag == DW_TAG_union_type));
21482 }
21483
21484 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21485 whether or not to treat a DIE in this context as a declaration. */
21486
21487 static inline int
21488 class_or_namespace_scope_p (dw_die_ref context_die)
21489 {
21490 return (class_scope_p (context_die)
21491 || (context_die && context_die->die_tag == DW_TAG_namespace));
21492 }
21493
21494 /* Many forms of DIEs require a "type description" attribute. This
21495 routine locates the proper "type descriptor" die for the type given
21496 by 'type' plus any additional qualifiers given by 'cv_quals', and
21497 adds a DW_AT_type attribute below the given die. */
21498
21499 static void
21500 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21501 bool reverse, dw_die_ref context_die)
21502 {
21503 enum tree_code code = TREE_CODE (type);
21504 dw_die_ref type_die = NULL;
21505
21506 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21507 or fixed-point type, use the inner type. This is because we have no
21508 support for unnamed types in base_type_die. This can happen if this is
21509 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21510 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21511 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21512 type = TREE_TYPE (type), code = TREE_CODE (type);
21513
21514 if (code == ERROR_MARK
21515 /* Handle a special case. For functions whose return type is void, we
21516 generate *no* type attribute. (Note that no object may have type
21517 `void', so this only applies to function return types). */
21518 || code == VOID_TYPE)
21519 return;
21520
21521 type_die = modified_type_die (type,
21522 cv_quals | TYPE_QUALS (type),
21523 reverse,
21524 context_die);
21525
21526 if (type_die != NULL)
21527 add_AT_die_ref (object_die, DW_AT_type, type_die);
21528 }
21529
21530 /* Given an object die, add the calling convention attribute for the
21531 function call type. */
21532 static void
21533 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21534 {
21535 enum dwarf_calling_convention value = DW_CC_normal;
21536
21537 value = ((enum dwarf_calling_convention)
21538 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21539
21540 if (is_fortran ()
21541 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21542 {
21543 /* DWARF 2 doesn't provide a way to identify a program's source-level
21544 entry point. DW_AT_calling_convention attributes are only meant
21545 to describe functions' calling conventions. However, lacking a
21546 better way to signal the Fortran main program, we used this for
21547 a long time, following existing custom. Now, DWARF 4 has
21548 DW_AT_main_subprogram, which we add below, but some tools still
21549 rely on the old way, which we thus keep. */
21550 value = DW_CC_program;
21551
21552 if (dwarf_version >= 4 || !dwarf_strict)
21553 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21554 }
21555
21556 /* Only add the attribute if the backend requests it, and the value
21557 is not DW_CC_normal. */
21558 if (value && (value != DW_CC_normal))
21559 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21560 }
21561
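/* For example (illustrative only): for a Fortran main program

     PROGRAM HELLO
     END PROGRAM HELLO

   gfortran names the function MAIN__, so its DW_TAG_subprogram gets
   DW_AT_calling_convention DW_CC_program and, for DWARF 4 or when not
   being strict, DW_AT_main_subprogram as well.  Ordinary functions whose
   convention is DW_CC_normal get no DW_AT_calling_convention at all.  */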
21562 /* Given a tree pointer to a struct, class, union, or enum type node, return
21563 a pointer to the (string) tag name for the given type, or zero if the type
21564 was declared without a tag. */
21565
21566 static const char *
21567 type_tag (const_tree type)
21568 {
21569 const char *name = 0;
21570
21571 if (TYPE_NAME (type) != 0)
21572 {
21573 tree t = 0;
21574
21575 /* Find the IDENTIFIER_NODE for the type name. */
21576 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21577 && !TYPE_NAMELESS (type))
21578 t = TYPE_NAME (type);
21579
21580 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21581 a TYPE_DECL node, regardless of whether or not a `typedef' was
21582 involved. */
21583 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21584 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21585 {
21586 /* We want to be extra verbose. Don't call dwarf_name if
21587 DECL_NAME isn't set. The default hook for decl_printable_name
21588 doesn't like that, and in this context it's correct to return
21589 0, instead of "<anonymous>" or the like. */
21590 if (DECL_NAME (TYPE_NAME (type))
21591 && !DECL_NAMELESS (TYPE_NAME (type)))
21592 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21593 }
21594
21595 /* Now get the name as a string, or invent one. */
21596 if (!name && t != 0)
21597 name = IDENTIFIER_POINTER (t);
21598 }
21599
21600 return (name == 0 || *name == '\0') ? 0 : name;
21601 }
21602
21603 /* Return the type associated with a data member, making a special check
21604 for bit-field types. */
21605
21606 static inline tree
21607 member_declared_type (const_tree member)
21608 {
21609 return (DECL_BIT_FIELD_TYPE (member)
21610 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21611 }
21612
21613 /* Get the decl's label, as described by its RTL. This may be different
21614 from the DECL_NAME name used in the source file. */
21615
21616 #if 0
21617 static const char *
21618 decl_start_label (tree decl)
21619 {
21620 rtx x;
21621 const char *fnname;
21622
21623 x = DECL_RTL (decl);
21624 gcc_assert (MEM_P (x));
21625
21626 x = XEXP (x, 0);
21627 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21628
21629 fnname = XSTR (x, 0);
21630 return fnname;
21631 }
21632 #endif
21633 \f
21634 /* For variable-length arrays that have been previously generated, but
21635 may be incomplete due to missing subscript info, fill the subscript
21636 info. Return TRUE if this is one of those cases. */
21637 static bool
21638 fill_variable_array_bounds (tree type)
21639 {
21640 if (TREE_ASM_WRITTEN (type)
21641 && TREE_CODE (type) == ARRAY_TYPE
21642 && variably_modified_type_p (type, NULL))
21643 {
21644 dw_die_ref array_die = lookup_type_die (type);
21645 if (!array_die)
21646 return false;
21647 add_subscript_info (array_die, type, !is_ada ());
21648 return true;
21649 }
21650 return false;
21651 }
21652
21653 /* These routines generate the internal representation of the DIE's for
21654 the compilation unit. Debugging information is collected by walking
21655 the declaration trees passed in from dwarf2out_decl(). */
21656
21657 static void
21658 gen_array_type_die (tree type, dw_die_ref context_die)
21659 {
21660 dw_die_ref array_die;
21661
21662 /* GNU compilers represent multidimensional array types as sequences of one
21663 dimensional array types whose element types are themselves array types.
21664 We sometimes squish that down to a single array_type DIE with multiple
21665 subscripts in the Dwarf debugging info. The draft Dwarf specification
21666 says that we are allowed to do this kind of compression in C, because
21667 there is no difference between an array of arrays and a multidimensional
21668 array. We don't do this for Ada, to remain as close as possible to the
21669 actual representation, which is especially important given the language's
21670 flexibility with respect to arrays of variable size. */
21671
21672 bool collapse_nested_arrays = !is_ada ();
21673
21674 if (fill_variable_array_bounds (type))
21675 return;
21676
21677 dw_die_ref scope_die = scope_die_for (type, context_die);
21678 tree element_type;
21679
21680 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21681 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21682 if (TYPE_STRING_FLAG (type)
21683 && TREE_CODE (type) == ARRAY_TYPE
21684 && is_fortran ()
21685 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21686 {
21687 HOST_WIDE_INT size;
21688
21689 array_die = new_die (DW_TAG_string_type, scope_die, type);
21690 add_name_attribute (array_die, type_tag (type));
21691 equate_type_number_to_die (type, array_die);
21692 size = int_size_in_bytes (type);
21693 if (size >= 0)
21694 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21695 /* ??? We can't annotate types late, but for LTO we may not
21696 generate a location early either (gfortran.dg/save_6.f90). */
21697 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21698 && TYPE_DOMAIN (type) != NULL_TREE
21699 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21700 {
21701 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21702 tree rszdecl = szdecl;
21703
21704 size = int_size_in_bytes (TREE_TYPE (szdecl));
21705 if (!DECL_P (szdecl))
21706 {
21707 if (TREE_CODE (szdecl) == INDIRECT_REF
21708 && DECL_P (TREE_OPERAND (szdecl, 0)))
21709 {
21710 rszdecl = TREE_OPERAND (szdecl, 0);
21711 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21712 != DWARF2_ADDR_SIZE)
21713 size = 0;
21714 }
21715 else
21716 size = 0;
21717 }
21718 if (size > 0)
21719 {
21720 dw_loc_list_ref loc
21721 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21722 NULL);
21723 if (loc)
21724 {
21725 add_AT_location_description (array_die, DW_AT_string_length,
21726 loc);
21727 if (size != DWARF2_ADDR_SIZE)
21728 add_AT_unsigned (array_die, dwarf_version >= 5
21729 ? DW_AT_string_length_byte_size
21730 : DW_AT_byte_size, size);
21731 }
21732 }
21733 }
21734 return;
21735 }
21736
21737 array_die = new_die (DW_TAG_array_type, scope_die, type);
21738 add_name_attribute (array_die, type_tag (type));
21739 equate_type_number_to_die (type, array_die);
21740
21741 if (TREE_CODE (type) == VECTOR_TYPE)
21742 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21743
21744 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21745 if (is_fortran ()
21746 && TREE_CODE (type) == ARRAY_TYPE
21747 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21748 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21749 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21750
21751 #if 0
21752 /* We default the array ordering. Debuggers will probably do the right
21753 things even if DW_AT_ordering is not present. It's not even an issue
21754 until we start to get into multidimensional arrays anyway. If a debugger
21755 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21756 then we'll have to put the DW_AT_ordering attribute back in. (But if
21757 and when we find out that we need to put these in, we will only do so
21758 for multidimensional arrays.) */
21759 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21760 #endif
21761
21762 if (TREE_CODE (type) == VECTOR_TYPE)
21763 {
21764 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21765 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21766 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21767 add_bound_info (subrange_die, DW_AT_upper_bound,
21768 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21769 }
21770 else
21771 add_subscript_info (array_die, type, collapse_nested_arrays);
21772
21773 /* Add representation of the type of the elements of this array type and
21774 emit the corresponding DIE if we haven't done it already. */
21775 element_type = TREE_TYPE (type);
21776 if (collapse_nested_arrays)
21777 while (TREE_CODE (element_type) == ARRAY_TYPE)
21778 {
21779 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21780 break;
21781 element_type = TREE_TYPE (element_type);
21782 }
21783
21784 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21785 TREE_CODE (type) == ARRAY_TYPE
21786 && TYPE_REVERSE_STORAGE_ORDER (type),
21787 context_die);
21788
21789 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21790 if (TYPE_ARTIFICIAL (type))
21791 add_AT_flag (array_die, DW_AT_artificial, 1);
21792
21793 if (get_AT (array_die, DW_AT_name))
21794 add_pubtype (type, array_die);
21795
21796 add_alignment_attribute (array_die, type);
21797 }
21798
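/* For example (illustrative only): a GNU vector type such as

     typedef int v4si __attribute__ ((vector_size (16)));

   is described as a DW_TAG_array_type of int flagged with
   DW_AT_GNU_vector and a single subrange 0 .. 3, while a Fortran
   CHARACTER(LEN=10) entity instead takes the DW_TAG_string_type path
   near the top of this function, with DW_AT_byte_size 10.  */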
21799 /* This routine generates the DIE for an array with a hidden descriptor;
21800 details are filled into *info by a langhook. */
21801
21802 static void
21803 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21804 dw_die_ref context_die)
21805 {
21806 const dw_die_ref scope_die = scope_die_for (type, context_die);
21807 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21808 struct loc_descr_context context = { type, info->base_decl, NULL,
21809 false, false };
21810 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21811 int dim;
21812
21813 add_name_attribute (array_die, type_tag (type));
21814 equate_type_number_to_die (type, array_die);
21815
21816 if (info->ndimensions > 1)
21817 switch (info->ordering)
21818 {
21819 case array_descr_ordering_row_major:
21820 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21821 break;
21822 case array_descr_ordering_column_major:
21823 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21824 break;
21825 default:
21826 break;
21827 }
21828
21829 if (dwarf_version >= 3 || !dwarf_strict)
21830 {
21831 if (info->data_location)
21832 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21833 dw_scalar_form_exprloc, &context);
21834 if (info->associated)
21835 add_scalar_info (array_die, DW_AT_associated, info->associated,
21836 dw_scalar_form_constant
21837 | dw_scalar_form_exprloc
21838 | dw_scalar_form_reference, &context);
21839 if (info->allocated)
21840 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21841 dw_scalar_form_constant
21842 | dw_scalar_form_exprloc
21843 | dw_scalar_form_reference, &context);
21844 if (info->stride)
21845 {
21846 const enum dwarf_attribute attr
21847 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21848 const int forms
21849 = (info->stride_in_bits)
21850 ? dw_scalar_form_constant
21851 : (dw_scalar_form_constant
21852 | dw_scalar_form_exprloc
21853 | dw_scalar_form_reference);
21854
21855 add_scalar_info (array_die, attr, info->stride, forms, &context);
21856 }
21857 }
21858 if (dwarf_version >= 5)
21859 {
21860 if (info->rank)
21861 {
21862 add_scalar_info (array_die, DW_AT_rank, info->rank,
21863 dw_scalar_form_constant
21864 | dw_scalar_form_exprloc, &context);
21865 subrange_tag = DW_TAG_generic_subrange;
21866 context.placeholder_arg = true;
21867 }
21868 }
21869
21870 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21871
21872 for (dim = 0; dim < info->ndimensions; dim++)
21873 {
21874 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21875
21876 if (info->dimen[dim].bounds_type)
21877 add_type_attribute (subrange_die,
21878 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21879 false, context_die);
21880 if (info->dimen[dim].lower_bound)
21881 add_bound_info (subrange_die, DW_AT_lower_bound,
21882 info->dimen[dim].lower_bound, &context);
21883 if (info->dimen[dim].upper_bound)
21884 add_bound_info (subrange_die, DW_AT_upper_bound,
21885 info->dimen[dim].upper_bound, &context);
21886 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21887 add_scalar_info (subrange_die, DW_AT_byte_stride,
21888 info->dimen[dim].stride,
21889 dw_scalar_form_constant
21890 | dw_scalar_form_exprloc
21891 | dw_scalar_form_reference,
21892 &context);
21893 }
21894
21895 gen_type_die (info->element_type, context_die);
21896 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21897 TREE_CODE (type) == ARRAY_TYPE
21898 && TYPE_REVERSE_STORAGE_ORDER (type),
21899 context_die);
21900
21901 if (get_AT (array_die, DW_AT_name))
21902 add_pubtype (type, array_die);
21903
21904 add_alignment_attribute (array_die, type);
21905 }
21906
21907 #if 0
21908 static void
21909 gen_entry_point_die (tree decl, dw_die_ref context_die)
21910 {
21911 tree origin = decl_ultimate_origin (decl);
21912 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21913
21914 if (origin != NULL)
21915 add_abstract_origin_attribute (decl_die, origin);
21916 else
21917 {
21918 add_name_and_src_coords_attributes (decl_die, decl);
21919 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21920 TYPE_UNQUALIFIED, false, context_die);
21921 }
21922
21923 if (DECL_ABSTRACT_P (decl))
21924 equate_decl_number_to_die (decl, decl_die);
21925 else
21926 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21927 }
21928 #endif
21929
21930 /* Walk through the list of incomplete types again, trying once more to
21931 emit full debugging info for them. */
21932
21933 static void
21934 retry_incomplete_types (void)
21935 {
21936 set_early_dwarf s;
21937 int i;
21938
21939 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21940 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21941 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21942 vec_safe_truncate (incomplete_types, 0);
21943 }
21944
21945 /* Determine what tag to use for a record type. */
21946
21947 static enum dwarf_tag
21948 record_type_tag (tree type)
21949 {
21950 if (! lang_hooks.types.classify_record)
21951 return DW_TAG_structure_type;
21952
21953 switch (lang_hooks.types.classify_record (type))
21954 {
21955 case RECORD_IS_STRUCT:
21956 return DW_TAG_structure_type;
21957
21958 case RECORD_IS_CLASS:
21959 return DW_TAG_class_type;
21960
21961 case RECORD_IS_INTERFACE:
21962 if (dwarf_version >= 3 || !dwarf_strict)
21963 return DW_TAG_interface_type;
21964 return DW_TAG_structure_type;
21965
21966 default:
21967 gcc_unreachable ();
21968 }
21969 }
21970
21971 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21972 include all of the information about the enumeration values also. Each
21973 enumerated type name/value is listed as a child of the enumerated type
21974 DIE. */
21975
21976 static dw_die_ref
21977 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21978 {
21979 dw_die_ref type_die = lookup_type_die (type);
21980 dw_die_ref orig_type_die = type_die;
21981
21982 if (type_die == NULL)
21983 {
21984 type_die = new_die (DW_TAG_enumeration_type,
21985 scope_die_for (type, context_die), type);
21986 equate_type_number_to_die (type, type_die);
21987 add_name_attribute (type_die, type_tag (type));
21988 if ((dwarf_version >= 4 || !dwarf_strict)
21989 && ENUM_IS_SCOPED (type))
21990 add_AT_flag (type_die, DW_AT_enum_class, 1);
21991 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
21992 add_AT_flag (type_die, DW_AT_declaration, 1);
21993 if (!dwarf_strict)
21994 add_AT_unsigned (type_die, DW_AT_encoding,
21995 TYPE_UNSIGNED (type)
21996 ? DW_ATE_unsigned
21997 : DW_ATE_signed);
21998 }
21999 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22000 return type_die;
22001 else
22002 remove_AT (type_die, DW_AT_declaration);
22003
22004 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22005 given enum type is incomplete, do not generate the DW_AT_byte_size
22006 attribute or the DW_AT_element_list attribute. */
22007 if (TYPE_SIZE (type))
22008 {
22009 tree link;
22010
22011 if (!ENUM_IS_OPAQUE (type))
22012 TREE_ASM_WRITTEN (type) = 1;
22013 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22014 add_byte_size_attribute (type_die, type);
22015 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22016 add_alignment_attribute (type_die, type);
22017 if ((dwarf_version >= 3 || !dwarf_strict)
22018 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22019 {
22020 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22021 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22022 context_die);
22023 }
22024 if (TYPE_STUB_DECL (type) != NULL_TREE)
22025 {
22026 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22027 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22028 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22029 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22030 }
22031
22032 /* If the first reference to this type was as the return type of an
22033 inline function, then it may not have a parent. Fix this now. */
22034 if (type_die->die_parent == NULL)
22035 add_child_die (scope_die_for (type, context_die), type_die);
22036
22037 for (link = TYPE_VALUES (type);
22038 link != NULL; link = TREE_CHAIN (link))
22039 {
22040 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22041 tree value = TREE_VALUE (link);
22042
22043 gcc_assert (!ENUM_IS_OPAQUE (type));
22044 add_name_attribute (enum_die,
22045 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22046
22047 if (TREE_CODE (value) == CONST_DECL)
22048 value = DECL_INITIAL (value);
22049
22050 if (simple_type_size_in_bits (TREE_TYPE (value))
22051 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22052 {
22053 /* For constant forms created by add_AT_unsigned, DWARF
22054 consumers (GDB, elfutils, etc.) always zero-extend
22055 the value. Only when the actual value is negative
22056 do we need to use add_AT_int to generate a constant
22057 form that can represent negative values. */
22058 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22059 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22060 add_AT_unsigned (enum_die, DW_AT_const_value,
22061 (unsigned HOST_WIDE_INT) val);
22062 else
22063 add_AT_int (enum_die, DW_AT_const_value, val);
22064 }
22065 else
22066 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22067 that here. TODO: This should be re-worked to use correct
22068 signed/unsigned double tags for all cases. */
22069 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22070 }
22071
22072 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22073 if (TYPE_ARTIFICIAL (type)
22074 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22075 add_AT_flag (type_die, DW_AT_artificial, 1);
22076 }
22077 else
22078 add_AT_flag (type_die, DW_AT_declaration, 1);
22079
22080 add_pubtype (type, type_die);
22081
22082 return type_die;
22083 }
22084
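/* For example (illustrative only, assuming the DWARF version permits the
   newer attributes): the C++11 declaration

     enum class color : unsigned char { red, green = 5 };

   produces a DW_TAG_enumeration_type with DW_AT_enum_class set, a
   DW_AT_type referring to unsigned char as the underlying type, and two
   DW_TAG_enumerator children with DW_AT_const_value 0 and 5.  An
   incomplete (opaque) enum instead gets only DW_AT_declaration.  */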
22085 /* Generate a DIE to represent either a real live formal parameter decl or to
22086 represent just the type of some formal parameter position in some function
22087 type.
22088
22089 Note that this routine is a bit unusual because its argument may be a
22090 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22091 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22092 node. If it's the former then this function is being called to output a
22093 DIE to represent a formal parameter object (or some inlining thereof). If
22094 it's the latter, then this function is only being called to output a
22095 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22096 argument type of some subprogram type.
22097 If EMIT_NAME_P is true, name and source coordinate attributes
22098 are emitted. */
22099
22100 static dw_die_ref
22101 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22102 dw_die_ref context_die)
22103 {
22104 tree node_or_origin = node ? node : origin;
22105 tree ultimate_origin;
22106 dw_die_ref parm_die = NULL;
22107
22108 if (DECL_P (node_or_origin))
22109 {
22110 parm_die = lookup_decl_die (node);
22111
22112 /* If the contexts differ, we may not be talking about the same
22113 thing.
22114 ??? When in LTO the DIE parent is the "abstract" copy and the
22115 context_die is the specification "copy". But this whole block
22116 should eventually be no longer needed. */
22117 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22118 {
22119 if (!DECL_ABSTRACT_P (node))
22120 {
22121 /* This can happen when creating an inlined instance, in
22122 which case we need to create a new DIE that will get
22123 annotated with DW_AT_abstract_origin. */
22124 parm_die = NULL;
22125 }
22126 else
22127 gcc_unreachable ();
22128 }
22129
22130 if (parm_die && parm_die->die_parent == NULL)
22131 {
22132 /* Check that parm_die already has the right attributes that
22133 we would have added below. If any attributes are
22134 missing, fall through to add them. */
22135 if (! DECL_ABSTRACT_P (node_or_origin)
22136 && !get_AT (parm_die, DW_AT_location)
22137 && !get_AT (parm_die, DW_AT_const_value))
22138 /* We are missing location info, and are about to add it. */
22139 ;
22140 else
22141 {
22142 add_child_die (context_die, parm_die);
22143 return parm_die;
22144 }
22145 }
22146 }
22147
22148 /* If we have a previously generated DIE, use it, unless this is a
22149 concrete instance (origin != NULL), in which case we need a new
22150 DIE with a corresponding DW_AT_abstract_origin. */
22151 bool reusing_die;
22152 if (parm_die && origin == NULL)
22153 reusing_die = true;
22154 else
22155 {
22156 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22157 reusing_die = false;
22158 }
22159
22160 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22161 {
22162 case tcc_declaration:
22163 ultimate_origin = decl_ultimate_origin (node_or_origin);
22164 if (node || ultimate_origin)
22165 origin = ultimate_origin;
22166
22167 if (reusing_die)
22168 goto add_location;
22169
22170 if (origin != NULL)
22171 add_abstract_origin_attribute (parm_die, origin);
22172 else if (emit_name_p)
22173 add_name_and_src_coords_attributes (parm_die, node);
22174 if (origin == NULL
22175 || (! DECL_ABSTRACT_P (node_or_origin)
22176 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22177 decl_function_context
22178 (node_or_origin))))
22179 {
22180 tree type = TREE_TYPE (node_or_origin);
22181 if (decl_by_reference_p (node_or_origin))
22182 add_type_attribute (parm_die, TREE_TYPE (type),
22183 TYPE_UNQUALIFIED,
22184 false, context_die);
22185 else
22186 add_type_attribute (parm_die, type,
22187 decl_quals (node_or_origin),
22188 false, context_die);
22189 }
22190 if (origin == NULL && DECL_ARTIFICIAL (node))
22191 add_AT_flag (parm_die, DW_AT_artificial, 1);
22192 add_location:
22193 if (node && node != origin)
22194 equate_decl_number_to_die (node, parm_die);
22195 if (! DECL_ABSTRACT_P (node_or_origin))
22196 add_location_or_const_value_attribute (parm_die, node_or_origin,
22197 node == NULL);
22198
22199 break;
22200
22201 case tcc_type:
22202 /* We were called with some kind of a ..._TYPE node. */
22203 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22204 context_die);
22205 break;
22206
22207 default:
22208 gcc_unreachable ();
22209 }
22210
22211 return parm_die;
22212 }
22213
22214 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22215 children DW_TAG_formal_parameter DIEs representing the arguments of the
22216 parameter pack.
22217
22218 PARM_PACK must be a function parameter pack.
22219 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22220 must point to the subsequent arguments of the function PACK_ARG belongs to.
22221 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22222 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22223 following the last one for which a DIE was generated. */
22224
22225 static dw_die_ref
22226 gen_formal_parameter_pack_die (tree parm_pack,
22227 tree pack_arg,
22228 dw_die_ref subr_die,
22229 tree *next_arg)
22230 {
22231 tree arg;
22232 dw_die_ref parm_pack_die;
22233
22234 gcc_assert (parm_pack
22235 && lang_hooks.function_parameter_pack_p (parm_pack)
22236 && subr_die);
22237
22238 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22239 add_src_coords_attributes (parm_pack_die, parm_pack);
22240
22241 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22242 {
22243 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22244 parm_pack))
22245 break;
22246 gen_formal_parameter_die (arg, NULL,
22247 false /* Don't emit name attribute. */,
22248 parm_pack_die);
22249 }
22250 if (next_arg)
22251 *next_arg = arg;
22252 return parm_pack_die;
22253 }
22254
22255 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22256 at the end of an (ANSI prototyped) formal parameters list. */
22257
22258 static void
22259 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22260 {
22261 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22262 }
22263
22264 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22265 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22266 parameters as specified in some function type specification (except for
22267 those which appear as part of a function *definition*). */
22268
22269 static void
22270 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22271 {
22272 tree link;
22273 tree formal_type = NULL;
22274 tree first_parm_type;
22275 tree arg;
22276
22277 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22278 {
22279 arg = DECL_ARGUMENTS (function_or_method_type);
22280 function_or_method_type = TREE_TYPE (function_or_method_type);
22281 }
22282 else
22283 arg = NULL_TREE;
22284
22285 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22286
22287 /* Make our first pass over the list of formal parameter types and output a
22288 DW_TAG_formal_parameter DIE for each one. */
22289 for (link = first_parm_type; link; )
22290 {
22291 dw_die_ref parm_die;
22292
22293 formal_type = TREE_VALUE (link);
22294 if (formal_type == void_type_node)
22295 break;
22296
22297 /* Output a (nameless) DIE to represent the formal parameter itself. */
22298 if (!POINTER_BOUNDS_TYPE_P (formal_type))
22299 {
22300 parm_die = gen_formal_parameter_die (formal_type, NULL,
22301 true /* Emit name attribute. */,
22302 context_die);
22303 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22304 && link == first_parm_type)
22305 {
22306 add_AT_flag (parm_die, DW_AT_artificial, 1);
22307 if (dwarf_version >= 3 || !dwarf_strict)
22308 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22309 }
22310 else if (arg && DECL_ARTIFICIAL (arg))
22311 add_AT_flag (parm_die, DW_AT_artificial, 1);
22312 }
22313
22314 link = TREE_CHAIN (link);
22315 if (arg)
22316 arg = DECL_CHAIN (arg);
22317 }
22318
22319 /* If this function type has an ellipsis, add a
22320 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22321 if (formal_type != void_type_node)
22322 gen_unspecified_parameters_die (function_or_method_type, context_die);
22323
22324 /* Make our second (and final) pass over the list of formal parameter types
22325 and output DIEs to represent those types (as necessary). */
22326 for (link = TYPE_ARG_TYPES (function_or_method_type);
22327 link && TREE_VALUE (link);
22328 link = TREE_CHAIN (link))
22329 gen_type_die (TREE_VALUE (link), context_die);
22330 }
22331
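/* For example (illustrative only): for the function type of

     int f (int, ...);

   the first pass emits one nameless DW_TAG_formal_parameter of type int,
   followed by a DW_TAG_unspecified_parameters DIE for the ellipsis.  For
   a C++ METHOD_TYPE the first parameter is additionally marked
   DW_AT_artificial and referenced from the context DIE through
   DW_AT_object_pointer, since it stands for the implicit 'this'
   pointer.  */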
22332 /* We want to generate the DIE for TYPE so that we can generate the
22333 DIE for MEMBER, which has been defined; we will need to refer back
22334 to the member declaration nested within TYPE. If we're trying to
22335 generate minimal debug info for TYPE, processing TYPE won't do the
22336 trick; we need to attach the member declaration by hand. */
22337
22338 static void
22339 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22340 {
22341 gen_type_die (type, context_die);
22342
22343 /* If we're trying to avoid duplicate debug info, we may not have
22344 emitted the member decl for this function. Emit it now. */
22345 if (TYPE_STUB_DECL (type)
22346 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22347 && ! lookup_decl_die (member))
22348 {
22349 dw_die_ref type_die;
22350 gcc_assert (!decl_ultimate_origin (member));
22351
22352 push_decl_scope (type);
22353 type_die = lookup_type_die_strip_naming_typedef (type);
22354 if (TREE_CODE (member) == FUNCTION_DECL)
22355 gen_subprogram_die (member, type_die);
22356 else if (TREE_CODE (member) == FIELD_DECL)
22357 {
22358 /* Ignore the nameless fields that are used to skip bits but handle
22359 C++ anonymous unions and structs. */
22360 if (DECL_NAME (member) != NULL_TREE
22361 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22362 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22363 {
22364 struct vlr_context vlr_ctx = {
22365 DECL_CONTEXT (member), /* struct_type */
22366 NULL_TREE /* variant_part_offset */
22367 };
22368 gen_type_die (member_declared_type (member), type_die);
22369 gen_field_die (member, &vlr_ctx, type_die);
22370 }
22371 }
22372 else
22373 gen_variable_die (member, NULL_TREE, type_die);
22374
22375 pop_decl_scope ();
22376 }
22377 }
22378 \f
22379 /* Forward declare these functions, because they are mutually recursive
22380 with their set_block_* pairing functions. */
22381 static void set_decl_origin_self (tree);
22382
22383 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22384 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22385 that it points to the node itself, thus indicating that the node is its
22386 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22387 the given node is NULL, recursively descend the decl/block tree which
22388 it is the root of, and for each other ..._DECL or BLOCK node contained
22389 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22390 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22391 values to point to themselves. */
22392
22393 static void
22394 set_block_origin_self (tree stmt)
22395 {
22396 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22397 {
22398 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22399
22400 {
22401 tree local_decl;
22402
22403 for (local_decl = BLOCK_VARS (stmt);
22404 local_decl != NULL_TREE;
22405 local_decl = DECL_CHAIN (local_decl))
22406 /* Do not recurse on nested functions since the inlining status
22407 of parent and child can be different as per the DWARF spec. */
22408 if (TREE_CODE (local_decl) != FUNCTION_DECL
22409 && !DECL_EXTERNAL (local_decl))
22410 set_decl_origin_self (local_decl);
22411 }
22412
22413 {
22414 tree subblock;
22415
22416 for (subblock = BLOCK_SUBBLOCKS (stmt);
22417 subblock != NULL_TREE;
22418 subblock = BLOCK_CHAIN (subblock))
22419 set_block_origin_self (subblock); /* Recurse. */
22420 }
22421 }
22422 }
22423
22424 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22425 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22426 node so that it points to the node itself, thus indicating that the
22427 node represents its own (abstract) origin. Additionally, if the
22428 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22429 the decl/block tree of which the given node is the root, and for
22430 each other ..._DECL or BLOCK node contained therein whose
22431 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22432 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22433 point to themselves. */
22434
22435 static void
22436 set_decl_origin_self (tree decl)
22437 {
22438 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22439 {
22440 DECL_ABSTRACT_ORIGIN (decl) = decl;
22441 if (TREE_CODE (decl) == FUNCTION_DECL)
22442 {
22443 tree arg;
22444
22445 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22446 DECL_ABSTRACT_ORIGIN (arg) = arg;
22447 if (DECL_INITIAL (decl) != NULL_TREE
22448 && DECL_INITIAL (decl) != error_mark_node)
22449 set_block_origin_self (DECL_INITIAL (decl));
22450 }
22451 }
22452 }
22453 \f
22454 /* Mark the early DIE for DECL as the abstract instance. */
22455
22456 static void
22457 dwarf2out_abstract_function (tree decl)
22458 {
22459 dw_die_ref old_die;
22460
22461 /* Make sure we have the actual abstract inline, not a clone. */
22462 decl = DECL_ORIGIN (decl);
22463
22464 if (DECL_IGNORED_P (decl))
22465 return;
22466
22467 old_die = lookup_decl_die (decl);
22468 /* With early debug we always have an old DIE unless we are in LTO
22469      and the user did not compile with debug info but only linked with it. */
22470 if (in_lto_p && ! old_die)
22471 return;
22472 gcc_assert (old_die != NULL);
22473 if (get_AT (old_die, DW_AT_inline)
22474 || get_AT (old_die, DW_AT_abstract_origin))
22475 /* We've already generated the abstract instance. */
22476 return;
22477
22478 /* Go ahead and put DW_AT_inline on the DIE. */
22479 if (DECL_DECLARED_INLINE_P (decl))
22480 {
22481 if (cgraph_function_possibly_inlined_p (decl))
22482 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22483 else
22484 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22485 }
22486 else
22487 {
22488 if (cgraph_function_possibly_inlined_p (decl))
22489 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22490 else
22491 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22492 }
22493
22494 if (DECL_DECLARED_INLINE_P (decl)
22495 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22496 add_AT_flag (old_die, DW_AT_artificial, 1);
22497
22498 set_decl_origin_self (decl);
22499 }
22500
22501 /* Helper function of premark_used_types() which gets called through
22502    hash_table::traverse.
22503
22504    Marks the DIE of the given TYPE as perennial, so it never gets
22505 marked as unused by prune_unused_types. */
22506
22507 bool
22508 premark_used_types_helper (tree const &type, void *)
22509 {
22510 dw_die_ref die;
22511
22512 die = lookup_type_die (type);
22513 if (die != NULL)
22514 die->die_perennial_p = 1;
22515 return true;
22516 }
22517
22518 /* Helper function of premark_types_used_by_global_vars which gets called
22519    through hash_table::traverse.
22520
22521 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22522 marked as unused by prune_unused_types. The DIE of the type is marked
22523 only if the global variable using the type will actually be emitted. */
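   Illustrative sketch (hypothetical example, not from any testcase): for

     struct S { int i; };
     struct S s;

   the entry pairing S's type with the variable `s' reaches this helper,
   and the DIE for `struct S' (and its parent DIEs) is kept only if the
   varpool says `s' really gets a definition.  */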
22524
22525 int
22526 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22527 void *)
22528 {
22529 struct types_used_by_vars_entry *entry;
22530 dw_die_ref die;
22531
22532 entry = (struct types_used_by_vars_entry *) *slot;
22533 gcc_assert (entry->type != NULL
22534 && entry->var_decl != NULL);
22535 die = lookup_type_die (entry->type);
22536 if (die)
22537 {
22538 /* Ask cgraph if the global variable really is to be emitted.
22539 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22540 varpool_node *node = varpool_node::get (entry->var_decl);
22541 if (node && node->definition)
22542 {
22543 die->die_perennial_p = 1;
22544 /* Keep the parent DIEs as well. */
22545 while ((die = die->die_parent) && die->die_perennial_p == 0)
22546 die->die_perennial_p = 1;
22547 }
22548 }
22549 return 1;
22550 }
22551
22552 /* Mark all members of used_types_hash as perennial. */
22553
22554 static void
22555 premark_used_types (struct function *fun)
22556 {
22557 if (fun && fun->used_types_hash)
22558 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22559 }
22560
22561 /* Mark all members of types_used_by_vars_entry as perennial. */
22562
22563 static void
22564 premark_types_used_by_global_vars (void)
22565 {
22566 if (types_used_by_vars_hash)
22567 types_used_by_vars_hash
22568 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22569 }
22570
22571 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22572 for CA_LOC call arg loc node. */
22573
22574 static dw_die_ref
22575 gen_call_site_die (tree decl, dw_die_ref subr_die,
22576 struct call_arg_loc_node *ca_loc)
22577 {
22578 dw_die_ref stmt_die = NULL, die;
22579 tree block = ca_loc->block;
22580
22581 while (block
22582 && block != DECL_INITIAL (decl)
22583 && TREE_CODE (block) == BLOCK)
22584 {
22585 stmt_die = BLOCK_DIE (block);
22586 if (stmt_die)
22587 break;
22588 block = BLOCK_SUPERCONTEXT (block);
22589 }
22590 if (stmt_die == NULL)
22591 stmt_die = subr_die;
22592 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22593 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22594 if (ca_loc->tail_call_p)
22595 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22596 if (ca_loc->symbol_ref)
22597 {
22598 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22599 if (tdie)
22600 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22601 else
22602 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22603 false);
22604 }
22605 return die;
22606 }
22607
22608 /* Generate a DIE to represent a declared function (either file-scope or
22609 block-local). */
22610
22611 static void
22612 gen_subprogram_die (tree decl, dw_die_ref context_die)
22613 {
22614 tree origin = decl_ultimate_origin (decl);
22615 dw_die_ref subr_die;
22616 dw_die_ref old_die = lookup_decl_die (decl);
22617
22618 /* This function gets called multiple times for different stages of
22619 the debug process. For example, for func() in this code:
22620
22621 namespace S
22622 {
22623 void func() { ... }
22624 }
22625
22626 ...we get called 4 times. Twice in early debug and twice in
22627 late debug:
22628
22629 Early debug
22630 -----------
22631
22632 1. Once while generating func() within the namespace. This is
22633 the declaration. The declaration bit below is set, as the
22634 context is the namespace.
22635
22636 A new DIE will be generated with DW_AT_declaration set.
22637
22638 2. Once for func() itself. This is the specification. The
22639 declaration bit below is clear as the context is the CU.
22640
22641 We will use the cached DIE from (1) to create a new DIE with
22642 DW_AT_specification pointing to the declaration in (1).
22643
22644 Late debug via rest_of_handle_final()
22645 -------------------------------------
22646
22647      3. Once while generating func() within the namespace. This is also the
22648 declaration, as in (1), but this time we will early exit below
22649 as we have a cached DIE and a declaration needs no additional
22650 annotations (no locations), as the source declaration line
22651 info is enough.
22652
22653 4. Once for func() itself. As in (2), this is the specification,
22654 but this time we will re-use the cached DIE, and just annotate
22655 it with the location information that should now be available.
22656
22657 For something without namespaces, but with abstract instances, we
22658      are also called multiple times:
22659
22660 class Base
22661 {
22662 public:
22663 Base (); // constructor declaration (1)
22664 };
22665
22666 Base::Base () { } // constructor specification (2)
22667
22668 Early debug
22669 -----------
22670
22671 1. Once for the Base() constructor by virtue of it being a
22672 member of the Base class. This is done via
22673 rest_of_type_compilation.
22674
22675 This is a declaration, so a new DIE will be created with
22676 DW_AT_declaration.
22677
22678 2. Once for the Base() constructor definition, but this time
22679 while generating the abstract instance of the base
22680 constructor (__base_ctor) which is being generated via early
22681 debug of reachable functions.
22682
22683 Even though we have a cached version of the declaration (1),
22684 we will create a DW_AT_specification of the declaration DIE
22685 in (1).
22686
22687 3. Once for the __base_ctor itself, but this time, we generate
22688      a DW_AT_abstract_origin version of the DW_AT_specification in
22689 (2).
22690
22691 Late debug via rest_of_handle_final
22692 -----------------------------------
22693
22694 4. One final time for the __base_ctor (which will have a cached
22695      DIE with DW_AT_abstract_origin created in (3)). This time,
22696 we will just annotate the location information now
22697 available.
22698 */
22699 int declaration = (current_function_decl != decl
22700 || class_or_namespace_scope_p (context_die));
22701
22702 /* A declaration that has been previously dumped needs no
22703 additional information. */
22704 if (old_die && declaration)
22705 return;
22706
22707 /* Now that the C++ front end lazily declares artificial member fns, we
22708 might need to retrofit the declaration into its class. */
22709 if (!declaration && !origin && !old_die
22710 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22711 && !class_or_namespace_scope_p (context_die)
22712 && debug_info_level > DINFO_LEVEL_TERSE)
22713 old_die = force_decl_die (decl);
22714
22715 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22716 if (origin != NULL)
22717 {
22718 gcc_assert (!declaration || local_scope_p (context_die));
22719
22720 /* Fixup die_parent for the abstract instance of a nested
22721 inline function. */
22722 if (old_die && old_die->die_parent == NULL)
22723 add_child_die (context_die, old_die);
22724
22725 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22726 {
22727 /* If we have a DW_AT_abstract_origin we have a working
22728 cached version. */
22729 subr_die = old_die;
22730 }
22731 else
22732 {
22733 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22734 add_abstract_origin_attribute (subr_die, origin);
22735 /* This is where the actual code for a cloned function is.
22736      Let's emit the linkage name attribute for it. This helps
22737      debuggers to, e.g., set breakpoints into
22738 constructors/destructors when the user asks "break
22739 K::K". */
22740 add_linkage_name (subr_die, decl);
22741 }
22742 }
22743 /* A cached copy, possibly from early dwarf generation. Reuse as
22744 much as possible. */
22745 else if (old_die)
22746 {
22747 if (!get_AT_flag (old_die, DW_AT_declaration)
22748 /* We can have a normal definition following an inline one in the
22749 case of redefinition of GNU C extern inlines.
22750      It seems reasonable to use DW_AT_specification in this case. */
22751 && !get_AT (old_die, DW_AT_inline))
22752 {
22753 /* Detect and ignore this case, where we are trying to output
22754 something we have already output. */
22755 if (get_AT (old_die, DW_AT_low_pc)
22756 || get_AT (old_die, DW_AT_ranges))
22757 return;
22758
22759 /* If we have no location information, this must be a
22760 partially generated DIE from early dwarf generation.
22761 Fall through and generate it. */
22762 }
22763
22764 /* If the definition comes from the same place as the declaration,
22765 maybe use the old DIE. We always want the DIE for this function
22766 that has the *_pc attributes to be under comp_unit_die so the
22767 debugger can find it. We also need to do this for abstract
22768 instances of inlines, since the spec requires the out-of-line copy
22769 to have the same parent. For local class methods, this doesn't
22770 apply; we just use the old DIE. */
22771 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22772 struct dwarf_file_data * file_index = lookup_filename (s.file);
22773 if ((is_cu_die (old_die->die_parent)
22774 /* This condition fixes the inconsistency/ICE with the
22775 following Fortran test (or some derivative thereof) while
22776 building libgfortran:
22777
22778 module some_m
22779 contains
22780 logical function funky (FLAG)
22781 funky = .true.
22782 end function
22783 end module
22784 */
22785 || (old_die->die_parent
22786 && old_die->die_parent->die_tag == DW_TAG_module)
22787 || context_die == NULL)
22788 && (DECL_ARTIFICIAL (decl)
22789 /* The location attributes may be in the abstract origin
22790 which in the case of LTO might be not available to
22791 look at. */
22792 || get_AT (old_die, DW_AT_abstract_origin)
22793 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22794 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22795 == (unsigned) s.line)
22796 && (!debug_column_info
22797 || s.column == 0
22798 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22799 == (unsigned) s.column)))))
22800 {
22801 subr_die = old_die;
22802
22803 /* Clear out the declaration attribute, but leave the
22804 parameters so they can be augmented with location
22805 information later. Unless this was a declaration, in
22806 which case, wipe out the nameless parameters and recreate
22807 them further down. */
22808 if (remove_AT (subr_die, DW_AT_declaration))
22809 {
22810
22811 remove_AT (subr_die, DW_AT_object_pointer);
22812 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22813 }
22814 }
22815 /* Make a specification pointing to the previously built
22816 declaration. */
22817 else
22818 {
22819 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22820 add_AT_specification (subr_die, old_die);
22821 add_pubname (decl, subr_die);
22822 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22823 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22824 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22825 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22826 if (debug_column_info
22827 && s.column
22828 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22829 != (unsigned) s.column))
22830 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22831
22832 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22833 emit the real type on the definition die. */
22834 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22835 {
22836 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22837 if (die == auto_die || die == decltype_auto_die)
22838 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22839 TYPE_UNQUALIFIED, false, context_die);
22840 }
22841
22842 /* When we process the method declaration, we haven't seen
22843 the out-of-class defaulted definition yet, so we have to
22844 recheck now. */
22845 if ((dwarf_version >= 5 || ! dwarf_strict)
22846 && !get_AT (subr_die, DW_AT_defaulted))
22847 {
22848 int defaulted
22849 = lang_hooks.decls.decl_dwarf_attribute (decl,
22850 DW_AT_defaulted);
22851 if (defaulted != -1)
22852 {
22853 /* Other values must have been handled before. */
22854 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22855 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22856 }
22857 }
22858 }
22859 }
22860 /* Create a fresh DIE for anything else. */
22861 else
22862 {
22863 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22864
22865 if (TREE_PUBLIC (decl))
22866 add_AT_flag (subr_die, DW_AT_external, 1);
22867
22868 add_name_and_src_coords_attributes (subr_die, decl);
22869 add_pubname (decl, subr_die);
22870 if (debug_info_level > DINFO_LEVEL_TERSE)
22871 {
22872 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22873 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22874 TYPE_UNQUALIFIED, false, context_die);
22875 }
22876
22877 add_pure_or_virtual_attribute (subr_die, decl);
22878 if (DECL_ARTIFICIAL (decl))
22879 add_AT_flag (subr_die, DW_AT_artificial, 1);
22880
22881 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22882 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22883
22884 add_alignment_attribute (subr_die, decl);
22885
22886 add_accessibility_attribute (subr_die, decl);
22887 }
22888
22889   /* Unless we have an existing non-declaration DIE, equate DECL to
22890      the new DIE. */
22891 if (!old_die || is_declaration_die (old_die))
22892 equate_decl_number_to_die (decl, subr_die);
22893
22894 if (declaration)
22895 {
22896 if (!old_die || !get_AT (old_die, DW_AT_inline))
22897 {
22898 add_AT_flag (subr_die, DW_AT_declaration, 1);
22899
22900 /* If this is an explicit function declaration then generate
22901 a DW_AT_explicit attribute. */
22902 if ((dwarf_version >= 3 || !dwarf_strict)
22903 && lang_hooks.decls.decl_dwarf_attribute (decl,
22904 DW_AT_explicit) == 1)
22905 add_AT_flag (subr_die, DW_AT_explicit, 1);
22906
22907 /* If this is a C++11 deleted special function member then generate
22908 a DW_AT_deleted attribute. */
22909 if ((dwarf_version >= 5 || !dwarf_strict)
22910 && lang_hooks.decls.decl_dwarf_attribute (decl,
22911 DW_AT_deleted) == 1)
22912 add_AT_flag (subr_die, DW_AT_deleted, 1);
22913
22914 /* If this is a C++11 defaulted special function member then
22915 generate a DW_AT_defaulted attribute. */
22916 if (dwarf_version >= 5 || !dwarf_strict)
22917 {
22918 int defaulted
22919 = lang_hooks.decls.decl_dwarf_attribute (decl,
22920 DW_AT_defaulted);
22921 if (defaulted != -1)
22922 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22923 }
22924
22925 /* If this is a C++11 non-static member function with & ref-qualifier
22926 then generate a DW_AT_reference attribute. */
22927 if ((dwarf_version >= 5 || !dwarf_strict)
22928 && lang_hooks.decls.decl_dwarf_attribute (decl,
22929 DW_AT_reference) == 1)
22930 add_AT_flag (subr_die, DW_AT_reference, 1);
22931
22932 /* If this is a C++11 non-static member function with &&
22933 ref-qualifier then generate a DW_AT_reference attribute. */
22934 if ((dwarf_version >= 5 || !dwarf_strict)
22935 && lang_hooks.decls.decl_dwarf_attribute (decl,
22936 DW_AT_rvalue_reference)
22937 == 1)
22938 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22939 }
22940 }
22941   /* For non-DECL_EXTERNAL decls, if range information is available, fill
22942 the DIE with it. */
22943 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22944 {
22945 HOST_WIDE_INT cfa_fb_offset;
22946
22947 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22948
22949 if (!crtl->has_bb_partition)
22950 {
22951 dw_fde_ref fde = fun->fde;
22952 if (fde->dw_fde_begin)
22953 {
22954 /* We have already generated the labels. */
22955 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22956 fde->dw_fde_end, false);
22957 }
22958 else
22959 {
22960 /* Create start/end labels and add the range. */
22961 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22962 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22963 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22964 current_function_funcdef_no);
22965 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22966 current_function_funcdef_no);
22967 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22968 false);
22969 }
22970
22971 #if VMS_DEBUGGING_INFO
22972 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22973 Section 2.3 Prologue and Epilogue Attributes:
22974 When a breakpoint is set on entry to a function, it is generally
22975 desirable for execution to be suspended, not on the very first
22976 instruction of the function, but rather at a point after the
22977 function's frame has been set up, after any language defined local
22978 declaration processing has been completed, and before execution of
22979 the first statement of the function begins. Debuggers generally
22980 cannot properly determine where this point is. Similarly for a
22981 breakpoint set on exit from a function. The prologue and epilogue
22982 attributes allow a compiler to communicate the location(s) to use. */
22983
22984 {
22985 if (fde->dw_fde_vms_end_prologue)
22986 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22987 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22988
22989 if (fde->dw_fde_vms_begin_epilogue)
22990 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22991 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22992 }
22993 #endif
22994
22995 }
22996 else
22997 {
22998 /* Generate pubnames entries for the split function code ranges. */
22999 dw_fde_ref fde = fun->fde;
23000
23001 if (fde->dw_fde_second_begin)
23002 {
23003 if (dwarf_version >= 3 || !dwarf_strict)
23004 {
23005 /* We should use ranges for non-contiguous code section
23006 addresses. Use the actual code range for the initial
23007 section, since the HOT/COLD labels might precede an
23008 alignment offset. */
23009 bool range_list_added = false;
23010 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23011 fde->dw_fde_end, &range_list_added,
23012 false);
23013 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23014 fde->dw_fde_second_end,
23015 &range_list_added, false);
23016 if (range_list_added)
23017 add_ranges (NULL);
23018 }
23019 else
23020 {
23021 	      /* There is no real support in DWARF2 for this, so we make
23022 a work-around. First, emit the pub name for the segment
23023 containing the function label. Then make and emit a
23024 simplified subprogram DIE for the second segment with the
23025 		 name prefixed by __second_sect_of_. We use the same
23026 		 linkage name for the second DIE so that gdb will find both
23027 sections when given "b foo". */
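		  /* For illustration (hypothetical name): a split function
		     `foo' thus gets its usual DW_TAG_subprogram DIE for the
		     first section plus a second minimal DIE named
		     "__second_sect_of_foo" for the other section, both
		     carrying foo's linkage name so that "b foo" works in
		     either section.  */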
23028 const char *name = NULL;
23029 tree decl_name = DECL_NAME (decl);
23030 dw_die_ref seg_die;
23031
23032 /* Do the 'primary' section. */
23033 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23034 fde->dw_fde_end, false);
23035
23036 /* Build a minimal DIE for the secondary section. */
23037 seg_die = new_die (DW_TAG_subprogram,
23038 subr_die->die_parent, decl);
23039
23040 if (TREE_PUBLIC (decl))
23041 add_AT_flag (seg_die, DW_AT_external, 1);
23042
23043 if (decl_name != NULL
23044 && IDENTIFIER_POINTER (decl_name) != NULL)
23045 {
23046 name = dwarf2_name (decl, 1);
23047 if (! DECL_ARTIFICIAL (decl))
23048 add_src_coords_attributes (seg_die, decl);
23049
23050 add_linkage_name (seg_die, decl);
23051 }
23052 gcc_assert (name != NULL);
23053 add_pure_or_virtual_attribute (seg_die, decl);
23054 if (DECL_ARTIFICIAL (decl))
23055 add_AT_flag (seg_die, DW_AT_artificial, 1);
23056
23057 name = concat ("__second_sect_of_", name, NULL);
23058 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23059 fde->dw_fde_second_end, false);
23060 add_name_attribute (seg_die, name);
23061 if (want_pubnames ())
23062 add_pubname_string (name, seg_die);
23063 }
23064 }
23065 else
23066 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23067 false);
23068 }
23069
23070 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23071
23072 /* We define the "frame base" as the function's CFA. This is more
23073 convenient for several reasons: (1) It's stable across the prologue
23074 and epilogue, which makes it better than just a frame pointer,
23075 (2) With dwarf3, there exists a one-byte encoding that allows us
23076 to reference the .debug_frame data by proxy, but failing that,
23077 (3) We can at least reuse the code inspection and interpretation
23078 code that determines the CFA position at various points in the
23079 function. */
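      /* A sketch of what ends up in the DIE (exact encoding is
	 target-dependent): with DWARF 3 or later the frame base is the
	 single-byte expression DW_OP_call_frame_cfa; otherwise we fall
	 back to a location list derived from the CFA program, i.e.
	 register/offset expressions tracking the CFA across the
	 function.  */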
23080 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23081 {
23082 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23083 add_AT_loc (subr_die, DW_AT_frame_base, op);
23084 }
23085 else
23086 {
23087 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23088 if (list->dw_loc_next)
23089 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23090 else
23091 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23092 }
23093
23094 /* Compute a displacement from the "steady-state frame pointer" to
23095 the CFA. The former is what all stack slots and argument slots
23096 will reference in the rtl; the latter is what we've told the
23097 debugger about. We'll need to adjust all frame_base references
23098 by this displacement. */
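      /* Purely illustrative numbers: if on some target the CFA sits 16
	 bytes above the frame pointer, an RTL slot at fp+8 ends up being
	 described to the debugger as DW_OP_fbreg -8 relative to the
	 CFA-based frame base once this displacement has been applied.  */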
23099 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23100
23101 if (fun->static_chain_decl)
23102 {
23103 /* DWARF requires here a location expression that computes the
23104 address of the enclosing subprogram's frame base. The machinery
23105 in tree-nested.c is supposed to store this specific address in the
23106 last field of the FRAME record. */
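	  /* Rough illustration (GNU C nested functions, hypothetical code):

	       int foo (int x) { int bar (void) { return x; } return bar (); }

	     bar's DW_AT_static_link lets the debugger recover foo's frame
	     base, and hence `x', through that last FRAME field.  */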
23107 const tree frame_type
23108 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23109 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23110
23111 tree fb_expr
23112 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23113 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23114 fb_expr, fb_decl, NULL_TREE);
23115
23116 add_AT_location_description (subr_die, DW_AT_static_link,
23117 loc_list_from_tree (fb_expr, 0, NULL));
23118 }
23119
23120 resolve_variable_values ();
23121 }
23122
23123   /* Generate child DIEs for template parameters. */
23124 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23125 gen_generic_params_dies (decl);
23126
23127 /* Now output descriptions of the arguments for this function. This gets
23128      (unnecessarily?) complex because the DECL_ARGUMENTS list
23129 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23130 `...' at the end of the formal parameter list. In order to find out if
23131 there was a trailing ellipsis or not, we must instead look at the type
23132 associated with the FUNCTION_DECL. This will be a node of type
23133 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23134 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23135 an ellipsis at the end. */
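  /* An illustrative example (hypothetical declarations):

       int f (int a, ...);    TYPE_ARG_TYPES does not end in void_type_node
       int g (int a);         TYPE_ARG_TYPES ends in void_type_node
       int h ();              unprototyped; no parameter type info at all  */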
23136
23137 /* In the case where we are describing a mere function declaration, all we
23138 need to do here (and all we *can* do here) is to describe the *types* of
23139 its formal parameters. */
23140 if (debug_info_level <= DINFO_LEVEL_TERSE)
23141 ;
23142 else if (declaration)
23143 gen_formal_types_die (decl, subr_die);
23144 else
23145 {
23146 /* Generate DIEs to represent all known formal parameters. */
23147 tree parm = DECL_ARGUMENTS (decl);
23148 tree generic_decl = early_dwarf
23149 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23150 tree generic_decl_parm = generic_decl
23151 ? DECL_ARGUMENTS (generic_decl)
23152 : NULL;
23153
23154 /* Now we want to walk the list of parameters of the function and
23155 emit their relevant DIEs.
23156
23157 We consider the case of DECL being an instance of a generic function
23158 as well as it being a normal function.
23159
23160 If DECL is an instance of a generic function we walk the
23161 parameters of the generic function declaration _and_ the parameters of
23162 DECL itself. This is useful because we want to emit specific DIEs for
23163 function parameter packs and those are declared as part of the
23164 generic function declaration. In that particular case,
23165 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23166 That DIE has children DIEs representing the set of arguments
23167 of the pack. Note that the set of pack arguments can be empty.
23168 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23169 	 children DIEs.
23170
23171 Otherwise, we just consider the parameters of DECL. */
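      /* A sketch of the generic case described above (hypothetical C++):

	   template<typename... T> void f (T... args);
	   template void f<int, char> (int, char);

	 walking the generic f alongside this instantiation yields a
	 DW_TAG_GNU_formal_parameter_pack DIE with two formal parameter
	 children, one per pack argument.  */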
23172 while (generic_decl_parm || parm)
23173 {
23174 if (generic_decl_parm
23175 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23176 gen_formal_parameter_pack_die (generic_decl_parm,
23177 parm, subr_die,
23178 &parm);
23179 else if (parm && !POINTER_BOUNDS_P (parm))
23180 {
23181 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23182
23183 if (early_dwarf
23184 && parm == DECL_ARGUMENTS (decl)
23185 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23186 && parm_die
23187 && (dwarf_version >= 3 || !dwarf_strict))
23188 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23189
23190 parm = DECL_CHAIN (parm);
23191 }
23192 else if (parm)
23193 parm = DECL_CHAIN (parm);
23194
23195 if (generic_decl_parm)
23196 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23197 }
23198
23199 /* Decide whether we need an unspecified_parameters DIE at the end.
23200 	 There are two cases to do this for: 1) the ANSI ... declaration --
23201 	 this is detectable when the end of the arg list is not a
23202 	 void_type_node; 2) an unprototyped function declaration (not a
23203 	 definition). The latter just means that we have no info about the
23204 	 parameters at all. */
23205 if (early_dwarf)
23206 {
23207 if (prototype_p (TREE_TYPE (decl)))
23208 {
23209 	      /* This is the prototyped case; check for a trailing ellipsis. */
23210 if (stdarg_p (TREE_TYPE (decl)))
23211 gen_unspecified_parameters_die (decl, subr_die);
23212 }
23213 else if (DECL_INITIAL (decl) == NULL_TREE)
23214 gen_unspecified_parameters_die (decl, subr_die);
23215 }
23216 }
23217
23218 if (subr_die != old_die)
23219 /* Add the calling convention attribute if requested. */
23220 add_calling_convention_attribute (subr_die, decl);
23221
23222 /* Output Dwarf info for all of the stuff within the body of the function
23223 (if it has one - it may be just a declaration).
23224
23225 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23226 a function. This BLOCK actually represents the outermost binding contour
23227 for the function, i.e. the contour in which the function's formal
23228 parameters and labels get declared. Curiously, it appears that the front
23229 end doesn't actually put the PARM_DECL nodes for the current function onto
23230      the BLOCK_VARS list for this outer scope; they are strung off of the
23231 DECL_ARGUMENTS list for the function instead.
23232
23233 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23234 the LABEL_DECL nodes for the function however, and we output DWARF info
23235 for those in decls_for_scope. Just within the `outer_scope' there will be
23236 a BLOCK node representing the function's outermost pair of curly braces,
23237 and any blocks used for the base and member initializers of a C++
23238 constructor function. */
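  /* A hypothetical example:

       void f (int p) { int x; { int y; } }

     OUTER_SCOPE is DECL_INITIAL (f); `p' lives on f's DECL_ARGUMENTS
     rather than on OUTER_SCOPE's BLOCK_VARS, while the body braces
     (declaring `x') and the inner pair (declaring `y') appear as nested
     BLOCKs handled by decls_for_scope.  */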
23239 tree outer_scope = DECL_INITIAL (decl);
23240 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23241 {
23242 int call_site_note_count = 0;
23243 int tail_call_site_note_count = 0;
23244
23245 /* Emit a DW_TAG_variable DIE for a named return value. */
23246 if (DECL_NAME (DECL_RESULT (decl)))
23247 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23248
23249 /* The first time through decls_for_scope we will generate the
23250 DIEs for the locals. The second time, we fill in the
23251 location info. */
23252 decls_for_scope (outer_scope, subr_die);
23253
23254 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23255 {
23256 struct call_arg_loc_node *ca_loc;
23257 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23258 {
23259 dw_die_ref die = NULL;
23260 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23261 rtx arg, next_arg;
23262
23263 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23264 ? XEXP (ca_loc->call_arg_loc_note, 0)
23265 : NULL_RTX);
23266 arg; arg = next_arg)
23267 {
23268 dw_loc_descr_ref reg, val;
23269 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23270 dw_die_ref cdie, tdie = NULL;
23271
23272 next_arg = XEXP (arg, 1);
23273 if (REG_P (XEXP (XEXP (arg, 0), 0))
23274 && next_arg
23275 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23276 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23277 && REGNO (XEXP (XEXP (arg, 0), 0))
23278 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23279 next_arg = XEXP (next_arg, 1);
23280 if (mode == VOIDmode)
23281 {
23282 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23283 if (mode == VOIDmode)
23284 mode = GET_MODE (XEXP (arg, 0));
23285 }
23286 if (mode == VOIDmode || mode == BLKmode)
23287 continue;
23288 /* Get dynamic information about call target only if we
23289 have no static information: we cannot generate both
23290 DW_AT_call_origin and DW_AT_call_target
23291 attributes. */
23292 if (ca_loc->symbol_ref == NULL_RTX)
23293 {
23294 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23295 {
23296 tloc = XEXP (XEXP (arg, 0), 1);
23297 continue;
23298 }
23299 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23300 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23301 {
23302 tlocc = XEXP (XEXP (arg, 0), 1);
23303 continue;
23304 }
23305 }
23306 reg = NULL;
23307 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23308 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23309 VAR_INIT_STATUS_INITIALIZED);
23310 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23311 {
23312 rtx mem = XEXP (XEXP (arg, 0), 0);
23313 reg = mem_loc_descriptor (XEXP (mem, 0),
23314 get_address_mode (mem),
23315 GET_MODE (mem),
23316 VAR_INIT_STATUS_INITIALIZED);
23317 }
23318 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23319 == DEBUG_PARAMETER_REF)
23320 {
23321 tree tdecl
23322 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23323 tdie = lookup_decl_die (tdecl);
23324 if (tdie == NULL)
23325 continue;
23326 }
23327 else
23328 continue;
23329 if (reg == NULL
23330 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23331 != DEBUG_PARAMETER_REF)
23332 continue;
23333 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23334 VOIDmode,
23335 VAR_INIT_STATUS_INITIALIZED);
23336 if (val == NULL)
23337 continue;
23338 if (die == NULL)
23339 die = gen_call_site_die (decl, subr_die, ca_loc);
23340 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23341 NULL_TREE);
23342 if (reg != NULL)
23343 add_AT_loc (cdie, DW_AT_location, reg);
23344 else if (tdie != NULL)
23345 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23346 tdie);
23347 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23348 if (next_arg != XEXP (arg, 1))
23349 {
23350 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23351 if (mode == VOIDmode)
23352 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23353 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23354 0), 1),
23355 mode, VOIDmode,
23356 VAR_INIT_STATUS_INITIALIZED);
23357 if (val != NULL)
23358 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23359 val);
23360 }
23361 }
23362 if (die == NULL
23363 && (ca_loc->symbol_ref || tloc))
23364 die = gen_call_site_die (decl, subr_die, ca_loc);
23365 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23366 {
23367 dw_loc_descr_ref tval = NULL;
23368
23369 if (tloc != NULL_RTX)
23370 tval = mem_loc_descriptor (tloc,
23371 GET_MODE (tloc) == VOIDmode
23372 ? Pmode : GET_MODE (tloc),
23373 VOIDmode,
23374 VAR_INIT_STATUS_INITIALIZED);
23375 if (tval)
23376 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23377 else if (tlocc != NULL_RTX)
23378 {
23379 tval = mem_loc_descriptor (tlocc,
23380 GET_MODE (tlocc) == VOIDmode
23381 ? Pmode : GET_MODE (tlocc),
23382 VOIDmode,
23383 VAR_INIT_STATUS_INITIALIZED);
23384 if (tval)
23385 add_AT_loc (die,
23386 dwarf_AT (DW_AT_call_target_clobbered),
23387 tval);
23388 }
23389 }
23390 if (die != NULL)
23391 {
23392 call_site_note_count++;
23393 if (ca_loc->tail_call_p)
23394 tail_call_site_note_count++;
23395 }
23396 }
23397 }
23398 call_arg_locations = NULL;
23399 call_arg_loc_last = NULL;
23400 if (tail_call_site_count >= 0
23401 && tail_call_site_count == tail_call_site_note_count
23402 && (!dwarf_strict || dwarf_version >= 5))
23403 {
23404 if (call_site_count >= 0
23405 && call_site_count == call_site_note_count)
23406 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23407 else
23408 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23409 }
23410 call_site_count = -1;
23411 tail_call_site_count = -1;
23412 }
23413
23414   /* Mark used types after we have created DIEs for the function's scopes. */
23415 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23416 }
23417
23418 /* Returns a hash value for X (which really is a die_struct). */
23419
23420 hashval_t
23421 block_die_hasher::hash (die_struct *d)
23422 {
23423 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23424 }
23425
23426 /* Return nonzero if decl_id and die_parent of die_struct X are the same
23427 as decl_id and die_parent of die_struct Y. */
23428
23429 bool
23430 block_die_hasher::equal (die_struct *x, die_struct *y)
23431 {
23432 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23433 }
23434
23435 /* Hold information about markers for inlined entry points. */
23436 struct GTY ((for_user)) inline_entry_data
23437 {
23438 /* The block that's the inlined_function_outer_scope for an inlined
23439 function. */
23440 tree block;
23441
23442 /* The label at the inlined entry point. */
23443 const char *label_pfx;
23444 unsigned int label_num;
23445
23446 /* The view number to be used as the inlined entry point. */
23447 var_loc_view view;
23448 };
23449
23450 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23451 {
23452 typedef tree compare_type;
23453 static inline hashval_t hash (const inline_entry_data *);
23454 static inline bool equal (const inline_entry_data *, const_tree);
23455 };
23456
23457 /* Hash table routines for inline_entry_data. */
23458
23459 inline hashval_t
23460 inline_entry_data_hasher::hash (const inline_entry_data *data)
23461 {
23462 return htab_hash_pointer (data->block);
23463 }
23464
23465 inline bool
23466 inline_entry_data_hasher::equal (const inline_entry_data *data,
23467 const_tree block)
23468 {
23469 return data->block == block;
23470 }
23471
23472 /* Inlined entry points pending DIE creation in this compilation unit. */
23473
23474 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23475
23476
23477 /* Return TRUE if DECL, which may have been previously generated as
23478 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23479 true if decl (or its origin) is either an extern declaration or a
23480 class/namespace scoped declaration.
23481
23482 The declare_in_namespace support causes us to get two DIEs for one
23483 variable, both of which are declarations. We want to avoid
23484 considering one to be a specification, so we must test for
23485 DECLARATION and DW_AT_declaration. */
23486 static inline bool
23487 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23488 {
23489 return (old_die && TREE_STATIC (decl) && !declaration
23490 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23491 }
23492
23493 /* Return true if DECL is a local static. */
23494
23495 static inline bool
23496 local_function_static (tree decl)
23497 {
23498 gcc_assert (VAR_P (decl));
23499 return TREE_STATIC (decl)
23500 && DECL_CONTEXT (decl)
23501 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23502 }
23503
23504 /* Generate a DIE to represent a declared data object.
23505 Either DECL or ORIGIN must be non-null. */
23506
23507 static void
23508 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23509 {
23510 HOST_WIDE_INT off = 0;
23511 tree com_decl;
23512 tree decl_or_origin = decl ? decl : origin;
23513 tree ultimate_origin;
23514 dw_die_ref var_die;
23515 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23516 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23517 || class_or_namespace_scope_p (context_die));
23518 bool specialization_p = false;
23519 bool no_linkage_name = false;
23520
23521   /* While C++ inline static data members have definitions inside the
23522 class, force the first DIE to be a declaration, then let gen_member_die
23523 reparent it to the class context and call gen_variable_die again
23524      to create the outside-of-class DIE for the definition. */
23525 if (!declaration
23526 && old_die == NULL
23527 && decl
23528 && DECL_CONTEXT (decl)
23529 && TYPE_P (DECL_CONTEXT (decl))
23530 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23531 {
23532 declaration = true;
23533 if (dwarf_version < 5)
23534 no_linkage_name = true;
23535 }
23536
23537 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23538 if (decl || ultimate_origin)
23539 origin = ultimate_origin;
23540 com_decl = fortran_common (decl_or_origin, &off);
23541
23542   /* A symbol in a COMMON block gets emitted as a child of the common block
23543      DIE, in the form of a data member. */
23544 if (com_decl)
23545 {
23546 dw_die_ref com_die;
23547 dw_loc_list_ref loc = NULL;
23548 die_node com_die_arg;
23549
23550 var_die = lookup_decl_die (decl_or_origin);
23551 if (var_die)
23552 {
23553 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23554 {
23555 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23556 if (loc)
23557 {
23558 if (off)
23559 {
23560 /* Optimize the common case. */
23561 if (single_element_loc_list_p (loc)
23562 && loc->expr->dw_loc_opc == DW_OP_addr
23563 && loc->expr->dw_loc_next == NULL
23564 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23565 == SYMBOL_REF)
23566 {
23567 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23568 loc->expr->dw_loc_oprnd1.v.val_addr
23569 = plus_constant (GET_MODE (x), x , off);
23570 }
23571 else
23572 loc_list_plus_const (loc, off);
23573 }
23574 add_AT_location_description (var_die, DW_AT_location, loc);
23575 remove_AT (var_die, DW_AT_declaration);
23576 }
23577 }
23578 return;
23579 }
23580
23581 if (common_block_die_table == NULL)
23582 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23583
23584 com_die_arg.decl_id = DECL_UID (com_decl);
23585 com_die_arg.die_parent = context_die;
23586 com_die = common_block_die_table->find (&com_die_arg);
23587 if (! early_dwarf)
23588 loc = loc_list_from_tree (com_decl, 2, NULL);
23589 if (com_die == NULL)
23590 {
23591 const char *cnam
23592 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23593 die_node **slot;
23594
23595 com_die = new_die (DW_TAG_common_block, context_die, decl);
23596 add_name_and_src_coords_attributes (com_die, com_decl);
23597 if (loc)
23598 {
23599 add_AT_location_description (com_die, DW_AT_location, loc);
23600 /* Avoid sharing the same loc descriptor between
23601 DW_TAG_common_block and DW_TAG_variable. */
23602 loc = loc_list_from_tree (com_decl, 2, NULL);
23603 }
23604 else if (DECL_EXTERNAL (decl_or_origin))
23605 add_AT_flag (com_die, DW_AT_declaration, 1);
23606 if (want_pubnames ())
23607 add_pubname_string (cnam, com_die); /* ??? needed? */
23608 com_die->decl_id = DECL_UID (com_decl);
23609 slot = common_block_die_table->find_slot (com_die, INSERT);
23610 *slot = com_die;
23611 }
23612 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23613 {
23614 add_AT_location_description (com_die, DW_AT_location, loc);
23615 loc = loc_list_from_tree (com_decl, 2, NULL);
23616 remove_AT (com_die, DW_AT_declaration);
23617 }
23618 var_die = new_die (DW_TAG_variable, com_die, decl);
23619 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23620 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23621 decl_quals (decl_or_origin), false,
23622 context_die);
23623 add_alignment_attribute (var_die, decl);
23624 add_AT_flag (var_die, DW_AT_external, 1);
23625 if (loc)
23626 {
23627 if (off)
23628 {
23629 /* Optimize the common case. */
23630 if (single_element_loc_list_p (loc)
23631 && loc->expr->dw_loc_opc == DW_OP_addr
23632 && loc->expr->dw_loc_next == NULL
23633 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23634 {
23635 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23636 loc->expr->dw_loc_oprnd1.v.val_addr
23637 = plus_constant (GET_MODE (x), x, off);
23638 }
23639 else
23640 loc_list_plus_const (loc, off);
23641 }
23642 add_AT_location_description (var_die, DW_AT_location, loc);
23643 }
23644 else if (DECL_EXTERNAL (decl_or_origin))
23645 add_AT_flag (var_die, DW_AT_declaration, 1);
23646 if (decl)
23647 equate_decl_number_to_die (decl, var_die);
23648 return;
23649 }
23650
23651 if (old_die)
23652 {
23653 if (declaration)
23654 {
23655 	  /* A declaration that has been previously dumped needs no
23656 	     further annotations, since it doesn't need location info on
23657 the second pass. */
23658 return;
23659 }
23660 else if (decl_will_get_specification_p (old_die, decl, declaration)
23661 && !get_AT (old_die, DW_AT_specification))
23662 {
23663 	  /* Fall through so we can make a new variable DIE along with a
23664 DW_AT_specification. */
23665 }
23666 else if (origin && old_die->die_parent != context_die)
23667 {
23668 /* If we will be creating an inlined instance, we need a
23669 new DIE that will get annotated with
23670 DW_AT_abstract_origin. */
23671 gcc_assert (!DECL_ABSTRACT_P (decl));
23672 }
23673 else
23674 {
23675 /* If a DIE was dumped early, it still needs location info.
23676 Skip to where we fill the location bits. */
23677 var_die = old_die;
23678
23679 /* ??? In LTRANS we cannot annotate early created variably
23680 modified type DIEs without copying them and adjusting all
23681 	     references to them. Thus we dump them again. Also add a
23682 	     reference to them, but beware of a -g0 compile and a -g link,
23683 	     in which case the reference will already be present. */
23684 tree type = TREE_TYPE (decl_or_origin);
23685 if (in_lto_p
23686 && ! get_AT (var_die, DW_AT_type)
23687 && variably_modified_type_p
23688 (type, decl_function_context (decl_or_origin)))
23689 {
23690 if (decl_by_reference_p (decl_or_origin))
23691 add_type_attribute (var_die, TREE_TYPE (type),
23692 TYPE_UNQUALIFIED, false, context_die);
23693 else
23694 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23695 false, context_die);
23696 }
23697
23698 goto gen_variable_die_location;
23699 }
23700 }
23701
23702 /* For static data members, the declaration in the class is supposed
23703 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23704 also in DWARF2; the specification should still be DW_TAG_variable
23705 referencing the DW_TAG_member DIE. */
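  /* For example (hypothetical C++):

       struct S { static int x; };
       int S::x = 1;

     the in-class declaration becomes a DW_TAG_member child of S's DIE
     (for DWARF versions before 5), and the definition becomes a
     DW_TAG_variable whose DW_AT_specification points back at it.  */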
23706 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23707 var_die = new_die (DW_TAG_member, context_die, decl);
23708 else
23709 var_die = new_die (DW_TAG_variable, context_die, decl);
23710
23711 if (origin != NULL)
23712 add_abstract_origin_attribute (var_die, origin);
23713
23714 /* Loop unrolling can create multiple blocks that refer to the same
23715 static variable, so we must test for the DW_AT_declaration flag.
23716
23717 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23718 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23719 sharing them.
23720
23721 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23722 else if (decl_will_get_specification_p (old_die, decl, declaration))
23723 {
23724 /* This is a definition of a C++ class level static. */
23725 add_AT_specification (var_die, old_die);
23726 specialization_p = true;
23727 if (DECL_NAME (decl))
23728 {
23729 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23730 struct dwarf_file_data * file_index = lookup_filename (s.file);
23731
23732 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23733 add_AT_file (var_die, DW_AT_decl_file, file_index);
23734
23735 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23736 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23737
23738 if (debug_column_info
23739 && s.column
23740 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23741 != (unsigned) s.column))
23742 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23743
23744 if (old_die->die_tag == DW_TAG_member)
23745 add_linkage_name (var_die, decl);
23746 }
23747 }
23748 else
23749 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23750
23751 if ((origin == NULL && !specialization_p)
23752 || (origin != NULL
23753 && !DECL_ABSTRACT_P (decl_or_origin)
23754 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23755 decl_function_context
23756 (decl_or_origin))))
23757 {
23758 tree type = TREE_TYPE (decl_or_origin);
23759
23760 if (decl_by_reference_p (decl_or_origin))
23761 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23762 context_die);
23763 else
23764 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23765 context_die);
23766 }
23767
23768 if (origin == NULL && !specialization_p)
23769 {
23770 if (TREE_PUBLIC (decl))
23771 add_AT_flag (var_die, DW_AT_external, 1);
23772
23773 if (DECL_ARTIFICIAL (decl))
23774 add_AT_flag (var_die, DW_AT_artificial, 1);
23775
23776 add_alignment_attribute (var_die, decl);
23777
23778 add_accessibility_attribute (var_die, decl);
23779 }
23780
23781 if (declaration)
23782 add_AT_flag (var_die, DW_AT_declaration, 1);
23783
23784 if (decl && (DECL_ABSTRACT_P (decl)
23785 || !old_die || is_declaration_die (old_die)))
23786 equate_decl_number_to_die (decl, var_die);
23787
23788 gen_variable_die_location:
23789 if (! declaration
23790 && (! DECL_ABSTRACT_P (decl_or_origin)
23791 /* Local static vars are shared between all clones/inlines,
23792 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23793 already set. */
23794 || (VAR_P (decl_or_origin)
23795 && TREE_STATIC (decl_or_origin)
23796 && DECL_RTL_SET_P (decl_or_origin))))
23797 {
23798 if (early_dwarf)
23799 add_pubname (decl_or_origin, var_die);
23800 else
23801 add_location_or_const_value_attribute (var_die, decl_or_origin,
23802 decl == NULL);
23803 }
23804 else
23805 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23806
23807 if ((dwarf_version >= 4 || !dwarf_strict)
23808 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23809 DW_AT_const_expr) == 1
23810 && !get_AT (var_die, DW_AT_const_expr)
23811 && !specialization_p)
23812 add_AT_flag (var_die, DW_AT_const_expr, 1);
23813
23814 if (!dwarf_strict)
23815 {
23816 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23817 DW_AT_inline);
23818 if (inl != -1
23819 && !get_AT (var_die, DW_AT_inline)
23820 && !specialization_p)
23821 add_AT_unsigned (var_die, DW_AT_inline, inl);
23822 }
23823 }
23824
23825 /* Generate a DIE to represent a named constant. */
23826
23827 static void
23828 gen_const_die (tree decl, dw_die_ref context_die)
23829 {
23830 dw_die_ref const_die;
23831 tree type = TREE_TYPE (decl);
23832
23833 const_die = lookup_decl_die (decl);
23834 if (const_die)
23835 return;
23836
23837 const_die = new_die (DW_TAG_constant, context_die, decl);
23838 equate_decl_number_to_die (decl, const_die);
23839 add_name_and_src_coords_attributes (const_die, decl);
23840 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23841 if (TREE_PUBLIC (decl))
23842 add_AT_flag (const_die, DW_AT_external, 1);
23843 if (DECL_ARTIFICIAL (decl))
23844 add_AT_flag (const_die, DW_AT_artificial, 1);
23845 tree_add_const_value_attribute_for_decl (const_die, decl);
23846 }
23847
23848 /* Generate a DIE to represent a label identifier. */
23849
23850 static void
23851 gen_label_die (tree decl, dw_die_ref context_die)
23852 {
23853 tree origin = decl_ultimate_origin (decl);
23854 dw_die_ref lbl_die = lookup_decl_die (decl);
23855 rtx insn;
23856 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23857
23858 if (!lbl_die)
23859 {
23860 lbl_die = new_die (DW_TAG_label, context_die, decl);
23861 equate_decl_number_to_die (decl, lbl_die);
23862
23863 if (origin != NULL)
23864 add_abstract_origin_attribute (lbl_die, origin);
23865 else
23866 add_name_and_src_coords_attributes (lbl_die, decl);
23867 }
23868
23869 if (DECL_ABSTRACT_P (decl))
23870 equate_decl_number_to_die (decl, lbl_die);
23871 else if (! early_dwarf)
23872 {
23873 insn = DECL_RTL_IF_SET (decl);
23874
23875       /* Deleted labels are programmer-specified labels which have been
23876 eliminated because of various optimizations. We still emit them
23877 here so that it is possible to put breakpoints on them. */
23878 if (insn
23879 && (LABEL_P (insn)
23880 || ((NOTE_P (insn)
23881 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23882 {
23883 	  /* When optimization is enabled (via -O), some parts of the compiler
23884 	     (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns that
23885 	     represent source-level labels which were explicitly declared by
23886 	     the user. This really shouldn't be happening, though, so catch
23887 	     it if it ever does. */
23888 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23889
23890 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23891 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23892 }
23893 else if (insn
23894 && NOTE_P (insn)
23895 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23896 && CODE_LABEL_NUMBER (insn) != -1)
23897 {
23898 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23899 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23900 }
23901 }
23902 }
23903
23904 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23905 attributes to the DIE for a block STMT, to describe where the inlined
23906 function was called from. This is similar to add_src_coords_attributes. */
23907
23908 static inline void
23909 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23910 {
23911 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23912
23913 if (dwarf_version >= 3 || !dwarf_strict)
23914 {
23915 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23916 add_AT_unsigned (die, DW_AT_call_line, s.line);
23917 if (debug_column_info && s.column)
23918 add_AT_unsigned (die, DW_AT_call_column, s.column);
23919 }
23920 }
23921
23922
23923 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23924 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23925
23926 static inline void
23927 add_high_low_attributes (tree stmt, dw_die_ref die)
23928 {
23929 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23930
23931 if (inline_entry_data **iedp
23932 = !inline_entry_data_table ? NULL
23933 : inline_entry_data_table->find_slot_with_hash (stmt,
23934 htab_hash_pointer (stmt),
23935 NO_INSERT))
23936 {
23937 inline_entry_data *ied = *iedp;
23938 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23939 gcc_assert (debug_inline_points);
23940 gcc_assert (inlined_function_outer_scope_p (stmt));
23941
23942 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23943 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23944
23945 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23946 && !dwarf_strict)
23947 {
23948 if (!output_asm_line_debug_info ())
23949 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23950 else
23951 {
23952 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23953 /* FIXME: this will resolve to a small number. Could we
23954 possibly emit smaller data? Ideally we'd emit a
23955 uleb128, but that would make the size of DIEs
23956 impossible for the compiler to compute, since it's
23957 the assembler that computes the value of the view
23958 label in this case. Ideally, we'd have a single form
23959 encompassing both the address and the view, and
23960 indirecting them through a table might make things
23961 easier, but even that would be more wasteful,
23962 space-wise, than what we have now. */
23963 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23964 }
23965 }
23966
23967 inline_entry_data_table->clear_slot (iedp);
23968 }
23969
23970 if (BLOCK_FRAGMENT_CHAIN (stmt)
23971 && (dwarf_version >= 3 || !dwarf_strict))
23972 {
23973 tree chain, superblock = NULL_TREE;
23974 dw_die_ref pdie;
23975 dw_attr_node *attr = NULL;
23976
23977 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23978 {
23979 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23980 BLOCK_NUMBER (stmt));
23981 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23982 }
23983
23984 /* Optimize duplicate .debug_ranges lists or even tails of
23985 	 lists. If this BLOCK has the same ranges as its supercontext,
23986 	 look up the DW_AT_ranges attribute in the supercontext (and
23987 	 recursively so), verify that the ranges_table contains the
23988 	 right values and use it instead of adding a new .debug_ranges entry. */
23989 for (chain = stmt, pdie = die;
23990 BLOCK_SAME_RANGE (chain);
23991 chain = BLOCK_SUPERCONTEXT (chain))
23992 {
23993 dw_attr_node *new_attr;
23994
23995 pdie = pdie->die_parent;
23996 if (pdie == NULL)
23997 break;
23998 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23999 break;
24000 new_attr = get_AT (pdie, DW_AT_ranges);
24001 if (new_attr == NULL
24002 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24003 break;
24004 attr = new_attr;
24005 superblock = BLOCK_SUPERCONTEXT (chain);
24006 }
24007 if (attr != NULL
24008 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24009 == BLOCK_NUMBER (superblock))
24010 && BLOCK_FRAGMENT_CHAIN (superblock))
24011 {
24012 unsigned long off = attr->dw_attr_val.v.val_offset;
24013 unsigned long supercnt = 0, thiscnt = 0;
24014 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24015 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24016 {
24017 ++supercnt;
24018 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24019 == BLOCK_NUMBER (chain));
24020 }
24021 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24022 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24023 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24024 ++thiscnt;
24025 gcc_assert (supercnt >= thiscnt);
24026 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24027 false);
24028 note_rnglist_head (off + supercnt - thiscnt);
24029 return;
24030 }
24031
24032 unsigned int offset = add_ranges (stmt, true);
24033 add_AT_range_list (die, DW_AT_ranges, offset, false);
24034 note_rnglist_head (offset);
24035
24036 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24037 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24038 do
24039 {
24040 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24041 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24042 chain = BLOCK_FRAGMENT_CHAIN (chain);
24043 }
24044 while (chain);
24045 add_ranges (NULL);
24046 }
24047 else
24048 {
24049 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24050 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24051 BLOCK_NUMBER (stmt));
24052 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24053 BLOCK_NUMBER (stmt));
24054 add_AT_low_high_pc (die, label, label_high, false);
24055 }
24056 }
24057
24058 /* Generate a DIE for a lexical block. */
24059
24060 static void
24061 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24062 {
24063 dw_die_ref old_die = BLOCK_DIE (stmt);
24064 dw_die_ref stmt_die = NULL;
24065 if (!old_die)
24066 {
24067 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24068 BLOCK_DIE (stmt) = stmt_die;
24069 }
24070
24071 if (BLOCK_ABSTRACT (stmt))
24072 {
24073 if (old_die)
24074 {
24075 /* This must have been generated early and it won't even
24076 need location information since it's a DW_AT_inline
24077 function. */
24078 if (flag_checking)
24079 for (dw_die_ref c = context_die; c; c = c->die_parent)
24080 if (c->die_tag == DW_TAG_inlined_subroutine
24081 || c->die_tag == DW_TAG_subprogram)
24082 {
24083 gcc_assert (get_AT (c, DW_AT_inline));
24084 break;
24085 }
24086 return;
24087 }
24088 }
24089 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24090 {
24091 /* If this is an inlined instance, create a new lexical die for
24092 anything below to attach DW_AT_abstract_origin to. */
24093 if (old_die)
24094 {
24095 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24096 BLOCK_DIE (stmt) = stmt_die;
24097 old_die = NULL;
24098 }
24099
24100 tree origin = block_ultimate_origin (stmt);
24101 if (origin != NULL_TREE && origin != stmt)
24102 add_abstract_origin_attribute (stmt_die, origin);
24103 }
24104
24105 if (old_die)
24106 stmt_die = old_die;
24107
24108 /* A non-abstract block whose blocks have already been reordered
24109 should have the instruction range for this block. If so, set the
24110 high/low attributes. */
24111 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24112 {
24113 gcc_assert (stmt_die);
24114 add_high_low_attributes (stmt, stmt_die);
24115 }
24116
24117 decls_for_scope (stmt, stmt_die);
24118 }
24119
24120 /* Generate a DIE for an inlined subprogram. */
24121
24122 static void
24123 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24124 {
24125 tree decl;
24126
24127 /* The instance of function that is effectively being inlined shall not
24128 be abstract. */
24129 gcc_assert (! BLOCK_ABSTRACT (stmt));
24130
24131 decl = block_ultimate_origin (stmt);
24132
24133 /* Make sure any inlined functions are known to be inlineable. */
24134 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24135 || cgraph_function_possibly_inlined_p (decl));
24136
24137 if (! BLOCK_ABSTRACT (stmt))
24138 {
24139 dw_die_ref subr_die
24140 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24141
24142 if (call_arg_locations || debug_inline_points)
24143 BLOCK_DIE (stmt) = subr_die;
24144 add_abstract_origin_attribute (subr_die, decl);
24145 if (TREE_ASM_WRITTEN (stmt))
24146 add_high_low_attributes (stmt, subr_die);
24147 add_call_src_coords_attributes (stmt, subr_die);
24148
24149 decls_for_scope (stmt, subr_die);
24150 }
24151 }
24152
24153 /* Generate a DIE for a field in a record or structure. CTX is required: see
24154 the comment for VLR_CONTEXT. */
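/* For instance, for a bit-field member such as "unsigned x : 3;" the
DW_TAG_member DIE built below additionally gets DW_AT_byte_size,
DW_AT_bit_size and DW_AT_bit_offset attributes describing the containing
storage unit and the position of the bits within it. */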
24155
24156 static void
24157 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24158 {
24159 dw_die_ref decl_die;
24160
24161 if (TREE_TYPE (decl) == error_mark_node)
24162 return;
24163
24164 decl_die = new_die (DW_TAG_member, context_die, decl);
24165 add_name_and_src_coords_attributes (decl_die, decl);
24166 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24167 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24168 context_die);
24169
24170 if (DECL_BIT_FIELD_TYPE (decl))
24171 {
24172 add_byte_size_attribute (decl_die, decl);
24173 add_bit_size_attribute (decl_die, decl);
24174 add_bit_offset_attribute (decl_die, decl, ctx);
24175 }
24176
24177 add_alignment_attribute (decl_die, decl);
24178
24179 /* If we have a variant part offset, then we are supposed to process a member
24180 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24181 trees. */
24182 gcc_assert (ctx->variant_part_offset == NULL_TREE
24183 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24184 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24185 add_data_member_location_attribute (decl_die, decl, ctx);
24186
24187 if (DECL_ARTIFICIAL (decl))
24188 add_AT_flag (decl_die, DW_AT_artificial, 1);
24189
24190 add_accessibility_attribute (decl_die, decl);
24191
24192 /* Equate decl number to die, so that we can look up this decl later on. */
24193 equate_decl_number_to_die (decl, decl_die);
24194 }
24195
24196 /* Generate a DIE for a pointer to a member type. TYPE can be an
24197 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24198 pointer to member function. */
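/* For a C++ pointer to data member such as "int S::*", for example, TYPE
is an OFFSET_TYPE whose TYPE_OFFSET_BASETYPE is S and whose TREE_TYPE is
int; the DIE built below records S via DW_AT_containing_type and int via
DW_AT_type. */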
24199
24200 static void
24201 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24202 {
24203 if (lookup_type_die (type))
24204 return;
24205
24206 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24207 scope_die_for (type, context_die), type);
24208
24209 equate_type_number_to_die (type, ptr_die);
24210 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24211 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24212 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24213 context_die);
24214 add_alignment_attribute (ptr_die, type);
24215
24216 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24217 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24218 {
24219 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24220 add_AT_loc (ptr_die, DW_AT_use_location, op);
24221 }
24222 }
24223
24224 static char *producer_string;
24225
24226 /* Return a heap-allocated producer string including command-line options
24227 if -grecord-gcc-switches. */
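/* As a rough illustration only (the exact contents depend on the command
line, the target and the front end in use), the resulting string might
look something like
"GNU C11 7.3.0 -march=x86-64 -g -O2"
i.e. the language name and compiler version followed by the recorded
switches, all separated by single spaces. */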
24228
24229 static char *
24230 gen_producer_string (void)
24231 {
24232 size_t j;
24233 auto_vec<const char *> switches;
24234 const char *language_string = lang_hooks.name;
24235 char *producer, *tail;
24236 const char *p;
24237 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24238 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24239
24240 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24241 switch (save_decoded_options[j].opt_index)
24242 {
24243 case OPT_o:
24244 case OPT_d:
24245 case OPT_dumpbase:
24246 case OPT_dumpdir:
24247 case OPT_auxbase:
24248 case OPT_auxbase_strip:
24249 case OPT_quiet:
24250 case OPT_version:
24251 case OPT_v:
24252 case OPT_w:
24253 case OPT_L:
24254 case OPT_D:
24255 case OPT_I:
24256 case OPT_U:
24257 case OPT_SPECIAL_unknown:
24258 case OPT_SPECIAL_ignore:
24259 case OPT_SPECIAL_program_name:
24260 case OPT_SPECIAL_input_file:
24261 case OPT_grecord_gcc_switches:
24262 case OPT__output_pch_:
24263 case OPT_fdiagnostics_show_location_:
24264 case OPT_fdiagnostics_show_option:
24265 case OPT_fdiagnostics_show_caret:
24266 case OPT_fdiagnostics_color_:
24267 case OPT_fverbose_asm:
24268 case OPT____:
24269 case OPT__sysroot_:
24270 case OPT_nostdinc:
24271 case OPT_nostdinc__:
24272 case OPT_fpreprocessed:
24273 case OPT_fltrans_output_list_:
24274 case OPT_fresolution_:
24275 case OPT_fdebug_prefix_map_:
24276 case OPT_fmacro_prefix_map_:
24277 case OPT_ffile_prefix_map_:
24278 case OPT_fcompare_debug:
24279 case OPT_fchecking:
24280 case OPT_fchecking_:
24281 /* Ignore these. */
24282 continue;
24283 default:
24284 if (cl_options[save_decoded_options[j].opt_index].flags
24285 & CL_NO_DWARF_RECORD)
24286 continue;
24287 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24288 == '-');
24289 switch (save_decoded_options[j].canonical_option[0][1])
24290 {
24291 case 'M':
24292 case 'i':
24293 case 'W':
24294 continue;
24295 case 'f':
24296 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24297 "dump", 4) == 0)
24298 continue;
24299 break;
24300 default:
24301 break;
24302 }
24303 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24304 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24305 break;
24306 }
24307
24308 producer = XNEWVEC (char, plen + 1 + len + 1);
24309 tail = producer;
24310 sprintf (tail, "%s %s", language_string, version_string);
24311 tail += plen;
24312
24313 FOR_EACH_VEC_ELT (switches, j, p)
24314 {
24315 len = strlen (p);
24316 *tail = ' ';
24317 memcpy (tail + 1, p, len);
24318 tail += len + 1;
24319 }
24320
24321 *tail = '\0';
24322 return producer;
24323 }
24324
24325 /* Given a C and/or C++ language/version string return the "highest".
24326 C++ is assumed to be "higher" than C in this case. Used for merging
24327 LTO translation unit languages. */
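/* For example, merging a "GNU C11" unit with a "GNU C++14" unit yields
"GNU C++14", since any C++ dialect is considered "higher" than any C
dialect here. */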
24328 static const char *
24329 highest_c_language (const char *lang1, const char *lang2)
24330 {
24331 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24332 return "GNU C++17";
24333 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24334 return "GNU C++14";
24335 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24336 return "GNU C++11";
24337 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24338 return "GNU C++98";
24339
24340 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24341 return "GNU C17";
24342 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24343 return "GNU C11";
24344 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24345 return "GNU C99";
24346 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24347 return "GNU C89";
24348
24349 gcc_unreachable ();
24350 }
24351
24352
24353 /* Generate the DIE for the compilation unit. */
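/* For instance, a translation unit compiled as "GNU C++14" is described
with DW_AT_language DW_LANG_C_plus_plus_14 when DWARF 5 is requested, and
falls back to plain DW_LANG_C_plus_plus for earlier DWARF versions, as per
the mapping below. */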
24354
24355 static dw_die_ref
24356 gen_compile_unit_die (const char *filename)
24357 {
24358 dw_die_ref die;
24359 const char *language_string = lang_hooks.name;
24360 int language;
24361
24362 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24363
24364 if (filename)
24365 {
24366 add_name_attribute (die, filename);
24367 /* Don't add cwd for <built-in>. */
24368 if (filename[0] != '<')
24369 add_comp_dir_attribute (die);
24370 }
24371
24372 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24373
24374 /* If our producer is LTO, try to figure out a common language to use
24375 from the global list of translation units. */
24376 if (strcmp (language_string, "GNU GIMPLE") == 0)
24377 {
24378 unsigned i;
24379 tree t;
24380 const char *common_lang = NULL;
24381
24382 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24383 {
24384 if (!TRANSLATION_UNIT_LANGUAGE (t))
24385 continue;
24386 if (!common_lang)
24387 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24388 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24389 ;
24390 else if (strncmp (common_lang, "GNU C", 5) == 0
24391 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24392 /* Mixing C and C++ is ok, use C++ in that case. */
24393 common_lang = highest_c_language (common_lang,
24394 TRANSLATION_UNIT_LANGUAGE (t));
24395 else
24396 {
24397 /* Fall back to C. */
24398 common_lang = NULL;
24399 break;
24400 }
24401 }
24402
24403 if (common_lang)
24404 language_string = common_lang;
24405 }
24406
24407 language = DW_LANG_C;
24408 if (strncmp (language_string, "GNU C", 5) == 0
24409 && ISDIGIT (language_string[5]))
24410 {
24411 language = DW_LANG_C89;
24412 if (dwarf_version >= 3 || !dwarf_strict)
24413 {
24414 if (strcmp (language_string, "GNU C89") != 0)
24415 language = DW_LANG_C99;
24416
24417 if (dwarf_version >= 5 /* || !dwarf_strict */)
24418 if (strcmp (language_string, "GNU C11") == 0
24419 || strcmp (language_string, "GNU C17") == 0)
24420 language = DW_LANG_C11;
24421 }
24422 }
24423 else if (strncmp (language_string, "GNU C++", 7) == 0)
24424 {
24425 language = DW_LANG_C_plus_plus;
24426 if (dwarf_version >= 5 /* || !dwarf_strict */)
24427 {
24428 if (strcmp (language_string, "GNU C++11") == 0)
24429 language = DW_LANG_C_plus_plus_11;
24430 else if (strcmp (language_string, "GNU C++14") == 0)
24431 language = DW_LANG_C_plus_plus_14;
24432 else if (strcmp (language_string, "GNU C++17") == 0)
24433 /* For now. */
24434 language = DW_LANG_C_plus_plus_14;
24435 }
24436 }
24437 else if (strcmp (language_string, "GNU F77") == 0)
24438 language = DW_LANG_Fortran77;
24439 else if (dwarf_version >= 3 || !dwarf_strict)
24440 {
24441 if (strcmp (language_string, "GNU Ada") == 0)
24442 language = DW_LANG_Ada95;
24443 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24444 {
24445 language = DW_LANG_Fortran95;
24446 if (dwarf_version >= 5 /* || !dwarf_strict */)
24447 {
24448 if (strcmp (language_string, "GNU Fortran2003") == 0)
24449 language = DW_LANG_Fortran03;
24450 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24451 language = DW_LANG_Fortran08;
24452 }
24453 }
24454 else if (strcmp (language_string, "GNU Objective-C") == 0)
24455 language = DW_LANG_ObjC;
24456 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24457 language = DW_LANG_ObjC_plus_plus;
24458 else if (dwarf_version >= 5 || !dwarf_strict)
24459 {
24460 if (strcmp (language_string, "GNU Go") == 0)
24461 language = DW_LANG_Go;
24462 }
24463 }
24464 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24465 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24466 language = DW_LANG_Fortran90;
24467
24468 add_AT_unsigned (die, DW_AT_language, language);
24469
24470 switch (language)
24471 {
24472 case DW_LANG_Fortran77:
24473 case DW_LANG_Fortran90:
24474 case DW_LANG_Fortran95:
24475 case DW_LANG_Fortran03:
24476 case DW_LANG_Fortran08:
24477 /* Fortran has case insensitive identifiers and the front-end
24478 lowercases everything. */
24479 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24480 break;
24481 default:
24482 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24483 break;
24484 }
24485 return die;
24486 }
24487
24488 /* Generate the DIE for a base class. */
24489
24490 static void
24491 gen_inheritance_die (tree binfo, tree access, tree type,
24492 dw_die_ref context_die)
24493 {
24494 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24495 struct vlr_context ctx = { type, NULL };
24496
24497 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24498 context_die);
24499 add_data_member_location_attribute (die, binfo, &ctx);
24500
24501 if (BINFO_VIRTUAL_P (binfo))
24502 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24503
24504 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24505 children, otherwise the default is DW_ACCESS_public. In DWARF2
24506 the default has always been DW_ACCESS_private. */
24507 if (access == access_public_node)
24508 {
24509 if (dwarf_version == 2
24510 || context_die->die_tag == DW_TAG_class_type)
24511 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24512 }
24513 else if (access == access_protected_node)
24514 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24515 else if (dwarf_version > 2
24516 && context_die->die_tag != DW_TAG_class_type)
24517 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24518 }
24519
24520 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24521 structure. */
24522 static bool
24523 is_variant_part (tree decl)
24524 {
24525 return (TREE_CODE (decl) == FIELD_DECL
24526 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24527 }
24528
24529 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24530 return the FIELD_DECL. Return NULL_TREE otherwise. */
24531
24532 static tree
24533 analyze_discr_in_predicate (tree operand, tree struct_type)
24534 {
24535 bool continue_stripping = true;
24536 while (continue_stripping)
24537 switch (TREE_CODE (operand))
24538 {
24539 CASE_CONVERT:
24540 operand = TREE_OPERAND (operand, 0);
24541 break;
24542 default:
24543 continue_stripping = false;
24544 break;
24545 }
24546
24547 /* Match field access to members of struct_type only. */
24548 if (TREE_CODE (operand) == COMPONENT_REF
24549 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24550 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24551 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24552 return TREE_OPERAND (operand, 1);
24553 else
24554 return NULL_TREE;
24555 }
24556
24557 /* Check that SRC is a constant integer that can be represented as a native
24558 integer constant (either signed or unsigned). If so, store it into DEST and
24559 return true. Return false otherwise. */
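/* A hypothetical example: if SRC is the INTEGER_CST -56 with a signed 8-bit
type for code generation but an unsigned 8-bit debug type (as can happen
for Ada character types, see below), SRC is first converted to the debug
type, so DEST ends up holding the unsigned value 200. */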
24560
24561 static bool
24562 get_discr_value (tree src, dw_discr_value *dest)
24563 {
24564 tree discr_type = TREE_TYPE (src);
24565
24566 if (lang_hooks.types.get_debug_type)
24567 {
24568 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24569 if (debug_type != NULL)
24570 discr_type = debug_type;
24571 }
24572
24573 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24574 return false;
24575
24576 /* Signedness can vary between the original type and the debug type. This
24577 can happen for character types in Ada for instance: the character type
24578 used for code generation can be signed, to be compatible with the C one,
24579 but from a debugger point of view, it must be unsigned. */
24580 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24581 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24582
24583 if (is_orig_unsigned != is_debug_unsigned)
24584 src = fold_convert (discr_type, src);
24585
24586 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24587 return false;
24588
24589 dest->pos = is_debug_unsigned;
24590 if (is_debug_unsigned)
24591 dest->v.uval = tree_to_uhwi (src);
24592 else
24593 dest->v.sval = tree_to_shwi (src);
24594
24595 return true;
24596 }
24597
24598 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24599 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24600 store NULL_TREE in DISCR_DECL. Otherwise:
24601
24602 - store the discriminant field in STRUCT_TYPE that controls the variant
24603 part to *DISCR_DECL
24604
24605 - put in *DISCR_LISTS_P an array where for each variant, the item
24606 represents the corresponding matching list of discriminant values.
24607
24608 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24609 the above array.
24610
24611 Note that when the array is allocated (i.e. when the analysis is
24612 successful), it is up to the caller to free the array. */
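/* As an Ada-flavoured sketch (not a literal dump), a variant part
controlled by a discriminant D, with one variant for "when 1 | 5 .. 9"
and a default "when others" variant, would typically have DECL_QUALIFIERs
along the lines of
D == 1 || (D >= 5 && D <= 9)
for the first variant (a TRUTH_ORIF_EXPR combining an EQ_EXPR with a
TRUTH_ANDIF_EXPR of GE/LE comparisons on the discriminant field) and
boolean_true_node for the default variant. */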
24613
24614 static void
24615 analyze_variants_discr (tree variant_part_decl,
24616 tree struct_type,
24617 tree *discr_decl,
24618 dw_discr_list_ref **discr_lists_p,
24619 unsigned *discr_lists_length)
24620 {
24621 tree variant_part_type = TREE_TYPE (variant_part_decl);
24622 tree variant;
24623 dw_discr_list_ref *discr_lists;
24624 unsigned i;
24625
24626 /* Compute how many variants there are in this variant part. */
24627 *discr_lists_length = 0;
24628 for (variant = TYPE_FIELDS (variant_part_type);
24629 variant != NULL_TREE;
24630 variant = DECL_CHAIN (variant))
24631 ++*discr_lists_length;
24632
24633 *discr_decl = NULL_TREE;
24634 *discr_lists_p
24635 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24636 sizeof (**discr_lists_p));
24637 discr_lists = *discr_lists_p;
24638
24639 /* And then analyze all variants to extract discriminant information for all
24640 of them. This analysis is conservative: as soon as we detect something we
24641 do not support, abort everything and pretend we found nothing. */
24642 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24643 variant != NULL_TREE;
24644 variant = DECL_CHAIN (variant), ++i)
24645 {
24646 tree match_expr = DECL_QUALIFIER (variant);
24647
24648 /* Now, try to analyze the predicate and deduce a discriminant for
24649 it. */
24650 if (match_expr == boolean_true_node)
24651 /* Typically happens for the default variant: it matches all cases that
24652 previous variants rejected. Don't output any matching value for
24653 this one. */
24654 continue;
24655
24656 /* The following loop tries to iterate over each discriminant
24657 possibility: single values or ranges. */
24658 while (match_expr != NULL_TREE)
24659 {
24660 tree next_round_match_expr;
24661 tree candidate_discr = NULL_TREE;
24662 dw_discr_list_ref new_node = NULL;
24663
24664 /* Possibilities are matched one after the other by nested
24665 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24666 continue with the rest at next iteration. */
24667 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24668 {
24669 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24670 match_expr = TREE_OPERAND (match_expr, 1);
24671 }
24672 else
24673 next_round_match_expr = NULL_TREE;
24674
24675 if (match_expr == boolean_false_node)
24676 /* This sub-expression matches nothing: just wait for the next
24677 one. */
24678 ;
24679
24680 else if (TREE_CODE (match_expr) == EQ_EXPR)
24681 {
24682 /* We are matching: <discr_field> == <integer_cst>
24683 This sub-expression matches a single value. */
24684 tree integer_cst = TREE_OPERAND (match_expr, 1);
24685
24686 candidate_discr
24687 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24688 struct_type);
24689
24690 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24691 if (!get_discr_value (integer_cst,
24692 &new_node->dw_discr_lower_bound))
24693 goto abort;
24694 new_node->dw_discr_range = false;
24695 }
24696
24697 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24698 {
24699 /* We are matching:
24700 <discr_field> > <integer_cst>
24701 && <discr_field> < <integer_cst>.
24702 This sub-expression matches the range of values between the
24703 two matched integer constants. Note that comparisons can be
24704 inclusive or exclusive. */
24705 tree candidate_discr_1, candidate_discr_2;
24706 tree lower_cst, upper_cst;
24707 bool lower_cst_included, upper_cst_included;
24708 tree lower_op = TREE_OPERAND (match_expr, 0);
24709 tree upper_op = TREE_OPERAND (match_expr, 1);
24710
24711 /* When the comparison is exclusive, the integer constant is not
24712 the discriminant range bound we are looking for: we will have
24713 to increment or decrement it. */
24714 if (TREE_CODE (lower_op) == GE_EXPR)
24715 lower_cst_included = true;
24716 else if (TREE_CODE (lower_op) == GT_EXPR)
24717 lower_cst_included = false;
24718 else
24719 goto abort;
24720
24721 if (TREE_CODE (upper_op) == LE_EXPR)
24722 upper_cst_included = true;
24723 else if (TREE_CODE (upper_op) == LT_EXPR)
24724 upper_cst_included = false;
24725 else
24726 goto abort;
24727
24728 /* Extract the discriminant from the first operand and check it
24729 is consistent with the same analysis in the second
24730 operand. */
24731 candidate_discr_1
24732 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24733 struct_type);
24734 candidate_discr_2
24735 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24736 struct_type);
24737 if (candidate_discr_1 == candidate_discr_2)
24738 candidate_discr = candidate_discr_1;
24739 else
24740 goto abort;
24741
24742 /* Extract bounds from both. */
24743 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24744 lower_cst = TREE_OPERAND (lower_op, 1);
24745 upper_cst = TREE_OPERAND (upper_op, 1);
24746
24747 if (!lower_cst_included)
24748 lower_cst
24749 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24750 build_int_cst (TREE_TYPE (lower_cst), 1));
24751 if (!upper_cst_included)
24752 upper_cst
24753 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24754 build_int_cst (TREE_TYPE (upper_cst), 1));
24755
24756 if (!get_discr_value (lower_cst,
24757 &new_node->dw_discr_lower_bound)
24758 || !get_discr_value (upper_cst,
24759 &new_node->dw_discr_upper_bound))
24760 goto abort;
24761
24762 new_node->dw_discr_range = true;
24763 }
24764
24765 else
24766 /* Unsupported sub-expression: we cannot determine the set of
24767 matching discriminant values. Abort everything. */
24768 goto abort;
24769
24770 /* If the discriminant info is not consistent with what we saw so
24771 far, consider the analysis failed and abort everything. */
24772 if (candidate_discr == NULL_TREE
24773 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24774 goto abort;
24775 else
24776 *discr_decl = candidate_discr;
24777
24778 if (new_node != NULL)
24779 {
24780 new_node->dw_discr_next = discr_lists[i];
24781 discr_lists[i] = new_node;
24782 }
24783 match_expr = next_round_match_expr;
24784 }
24785 }
24786
24787 /* If we reach this point, we could match everything we were interested
24788 in. */
24789 return;
24790
24791 abort:
24792 /* Clean up all data structures and return no result. */
24793 free (*discr_lists_p);
24794 *discr_lists_p = NULL;
24795 *discr_decl = NULL_TREE;
24796 }
24797
24798 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24799 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24800 under CONTEXT_DIE.
24801
24802 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24803 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24804 this type, which are record types, represent the available variants and each
24805 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24806 values are inferred from these attributes.
24807
24808 In trees, the offsets for the fields inside these sub-records are relative
24809 to the variant part itself, whereas the corresponding DIEs should have
24810 offset attributes that are relative to the embedding record base address.
24811 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24812 must be an expression that computes the offset of the variant part to
24813 describe in DWARF. */
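/* Roughly speaking, the output for such a FIELD_DECL is a
DW_TAG_variant_part DIE carrying a DW_AT_discr reference to the
discriminant member's DIE (when one could be identified), with one
DW_TAG_variant child per variant; each variant child carries a
DW_AT_discr_value or DW_AT_discr_list attribute (except for the default
variant) and, below it, the DIEs for that variant's members. */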
24814
24815 static void
24816 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24817 dw_die_ref context_die)
24818 {
24819 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24820 tree variant_part_offset = vlr_ctx->variant_part_offset;
24821 struct loc_descr_context ctx = {
24822 vlr_ctx->struct_type, /* context_type */
24823 NULL_TREE, /* base_decl */
24824 NULL, /* dpi */
24825 false, /* placeholder_arg */
24826 false /* placeholder_seen */
24827 };
24828
24829 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24830 NULL_TREE if there is no such field. */
24831 tree discr_decl = NULL_TREE;
24832 dw_discr_list_ref *discr_lists;
24833 unsigned discr_lists_length = 0;
24834 unsigned i;
24835
24836 dw_die_ref dwarf_proc_die = NULL;
24837 dw_die_ref variant_part_die
24838 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24839
24840 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24841
24842 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24843 &discr_decl, &discr_lists, &discr_lists_length);
24844
24845 if (discr_decl != NULL_TREE)
24846 {
24847 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24848
24849 if (discr_die)
24850 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24851 else
24852 /* We have no DIE for the discriminant, so just discard all
24853 discriminant information in the output. */
24854 discr_decl = NULL_TREE;
24855 }
24856
24857 /* If the offset for this variant part is more complex than a constant,
24858 create a DWARF procedure for it so that we will not have to generate DWARF
24859 expressions for it for each member. */
24860 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24861 && (dwarf_version >= 3 || !dwarf_strict))
24862 {
24863 const tree dwarf_proc_fndecl
24864 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24865 build_function_type (TREE_TYPE (variant_part_offset),
24866 NULL_TREE));
24867 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24868 const dw_loc_descr_ref dwarf_proc_body
24869 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24870
24871 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24872 dwarf_proc_fndecl, context_die);
24873 if (dwarf_proc_die != NULL)
24874 variant_part_offset = dwarf_proc_call;
24875 }
24876
24877 /* Output DIEs for all variants. */
24878 i = 0;
24879 for (tree variant = TYPE_FIELDS (variant_part_type);
24880 variant != NULL_TREE;
24881 variant = DECL_CHAIN (variant), ++i)
24882 {
24883 tree variant_type = TREE_TYPE (variant);
24884 dw_die_ref variant_die;
24885
24886 /* All variants (i.e. members of a variant part) are supposed to be
24887 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24888 under these records. */
24889 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24890
24891 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24892 equate_decl_number_to_die (variant, variant_die);
24893
24894 /* Output discriminant values this variant matches, if any. */
24895 if (discr_decl == NULL || discr_lists[i] == NULL)
24896 /* Either we have no discriminant information at all, or this is
24897 probably the default variant: as the standard says, don't
24898 output any discriminant value/list attribute. */
24899 ;
24900 else if (discr_lists[i]->dw_discr_next == NULL
24901 && !discr_lists[i]->dw_discr_range)
24902 /* If there is only one accepted value, don't bother outputting a
24903 list. */
24904 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24905 else
24906 add_discr_list (variant_die, discr_lists[i]);
24907
24908 for (tree member = TYPE_FIELDS (variant_type);
24909 member != NULL_TREE;
24910 member = DECL_CHAIN (member))
24911 {
24912 struct vlr_context vlr_sub_ctx = {
24913 vlr_ctx->struct_type, /* struct_type */
24914 NULL /* variant_part_offset */
24915 };
24916 if (is_variant_part (member))
24917 {
24918 /* All offsets for fields inside variant parts are relative to
24919 the top-level embedding RECORD_TYPE's base address. On the
24920 other hand, offsets in GCC's types are relative to the
24921 nested-most variant part. So we have to sum offsets each time
24922 we recurse. */
24923
24924 vlr_sub_ctx.variant_part_offset
24925 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24926 variant_part_offset, byte_position (member));
24927 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24928 }
24929 else
24930 {
24931 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24932 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24933 }
24934 }
24935 }
24936
24937 free (discr_lists);
24938 }
24939
24940 /* Generate a DIE for a class member. */
24941
24942 static void
24943 gen_member_die (tree type, dw_die_ref context_die)
24944 {
24945 tree member;
24946 tree binfo = TYPE_BINFO (type);
24947
24948 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24949
24950 /* If this is not an incomplete type, output descriptions of each of its
24951 members. Note that as we output the DIEs necessary to represent the
24952 members of this record or union type, we will also be trying to output
24953 DIEs to represent the *types* of those members. However the `type'
24954 function (above) will specifically avoid generating type DIEs for member
24955 types *within* the list of member DIEs for this (containing) type except
24956 for those types (of members) which are explicitly marked as also being
24957 members of this (containing) type themselves. The g++ front end can
24958 force any given type to be treated as a member of some other (containing)
24959 type by setting the TYPE_CONTEXT of the given (member) type to point to
24960 the TREE node representing the appropriate (containing) type. */
24961
24962 /* First output info about the base classes. */
24963 if (binfo)
24964 {
24965 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24966 int i;
24967 tree base;
24968
24969 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24970 gen_inheritance_die (base,
24971 (accesses ? (*accesses)[i] : access_public_node),
24972 type,
24973 context_die);
24974 }
24975
24976 /* Now output info about the data members and type members. */
24977 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24978 {
24979 struct vlr_context vlr_ctx = { type, NULL_TREE };
24980 bool static_inline_p
24981 = (TREE_STATIC (member)
24982 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24983 != -1));
24984
24985 /* Ignore clones. */
24986 if (DECL_ABSTRACT_ORIGIN (member))
24987 continue;
24988
24989 /* If we thought we were generating minimal debug info for TYPE
24990 and then changed our minds, some of the member declarations
24991 may have already been defined. Don't define them again, but
24992 do put them in the right order. */
24993
24994 if (dw_die_ref child = lookup_decl_die (member))
24995 {
24996 /* Handle inline static data members, which only have in-class
24997 declarations. */
24998 dw_die_ref ref = NULL;
24999 if (child->die_tag == DW_TAG_variable
25000 && child->die_parent == comp_unit_die ())
25001 {
25002 ref = get_AT_ref (child, DW_AT_specification);
25003 /* For C++17 inline static data members followed by redundant
25004 out-of-class redeclaration, we might get here with
25005 child being the DIE created for the out-of-class
25006 redeclaration and with its DW_AT_specification being
25007 the DIE created for in-class definition. We want to
25008 reparent the latter, and don't want to create another
25009 DIE with DW_AT_specification in that case, because
25010 we already have one. */
25011 if (ref
25012 && static_inline_p
25013 && ref->die_tag == DW_TAG_variable
25014 && ref->die_parent == comp_unit_die ()
25015 && get_AT (ref, DW_AT_specification) == NULL)
25016 {
25017 child = ref;
25018 ref = NULL;
25019 static_inline_p = false;
25020 }
25021 }
25022
25023 if (child->die_tag == DW_TAG_variable
25024 && child->die_parent == comp_unit_die ()
25025 && ref == NULL)
25026 {
25027 reparent_child (child, context_die);
25028 if (dwarf_version < 5)
25029 child->die_tag = DW_TAG_member;
25030 }
25031 else
25032 splice_child_die (context_die, child);
25033 }
25034
25035 /* Do not generate standard DWARF for variant parts if we are generating
25036 the corresponding GNAT encodings: DIEs generated for both would
25037 conflict in our mappings. */
25038 else if (is_variant_part (member)
25039 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25040 {
25041 vlr_ctx.variant_part_offset = byte_position (member);
25042 gen_variant_part (member, &vlr_ctx, context_die);
25043 }
25044 else
25045 {
25046 vlr_ctx.variant_part_offset = NULL_TREE;
25047 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25048 }
25049
25050 /* For C++ inline static data members emit immediately a DW_TAG_variable
25051 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25052 DW_AT_specification. */
25053 if (static_inline_p)
25054 {
25055 int old_extern = DECL_EXTERNAL (member);
25056 DECL_EXTERNAL (member) = 0;
25057 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25058 DECL_EXTERNAL (member) = old_extern;
25059 }
25060 }
25061 }
25062
25063 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25064 is set, we pretend that the type was never defined, so we only get the
25065 member DIEs needed by later specification DIEs. */
25066
25067 static void
25068 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25069 enum debug_info_usage usage)
25070 {
25071 if (TREE_ASM_WRITTEN (type))
25072 {
25073 /* Fill in the bound of variable-length fields in late dwarf if
25074 still incomplete. */
25075 if (!early_dwarf && variably_modified_type_p (type, NULL))
25076 for (tree member = TYPE_FIELDS (type);
25077 member;
25078 member = DECL_CHAIN (member))
25079 fill_variable_array_bounds (TREE_TYPE (member));
25080 return;
25081 }
25082
25083 dw_die_ref type_die = lookup_type_die (type);
25084 dw_die_ref scope_die = 0;
25085 int nested = 0;
25086 int complete = (TYPE_SIZE (type)
25087 && (! TYPE_STUB_DECL (type)
25088 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25089 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25090 complete = complete && should_emit_struct_debug (type, usage);
25091
25092 if (type_die && ! complete)
25093 return;
25094
25095 if (TYPE_CONTEXT (type) != NULL_TREE
25096 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25097 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25098 nested = 1;
25099
25100 scope_die = scope_die_for (type, context_die);
25101
25102 /* Generate child DIEs for template parameters. */
25103 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25104 schedule_generic_params_dies_gen (type);
25105
25106 if (! type_die || (nested && is_cu_die (scope_die)))
25107 /* First occurrence of type or toplevel definition of nested class. */
25108 {
25109 dw_die_ref old_die = type_die;
25110
25111 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25112 ? record_type_tag (type) : DW_TAG_union_type,
25113 scope_die, type);
25114 equate_type_number_to_die (type, type_die);
25115 if (old_die)
25116 add_AT_specification (type_die, old_die);
25117 else
25118 add_name_attribute (type_die, type_tag (type));
25119 }
25120 else
25121 remove_AT (type_die, DW_AT_declaration);
25122
25123 /* If this type has been completed, then give it a byte_size attribute and
25124 then give a list of members. */
25125 if (complete && !ns_decl)
25126 {
25127 /* Prevent infinite recursion in cases where the type of some member of
25128 this type is expressed in terms of this type itself. */
25129 TREE_ASM_WRITTEN (type) = 1;
25130 add_byte_size_attribute (type_die, type);
25131 add_alignment_attribute (type_die, type);
25132 if (TYPE_STUB_DECL (type) != NULL_TREE)
25133 {
25134 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25135 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25136 }
25137
25138 /* If the first reference to this type was as the return type of an
25139 inline function, then it may not have a parent. Fix this now. */
25140 if (type_die->die_parent == NULL)
25141 add_child_die (scope_die, type_die);
25142
25143 push_decl_scope (type);
25144 gen_member_die (type, type_die);
25145 pop_decl_scope ();
25146
25147 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25148 if (TYPE_ARTIFICIAL (type))
25149 add_AT_flag (type_die, DW_AT_artificial, 1);
25150
25151 /* GNU extension: Record what type our vtable lives in. */
25152 if (TYPE_VFIELD (type))
25153 {
25154 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25155
25156 gen_type_die (vtype, context_die);
25157 add_AT_die_ref (type_die, DW_AT_containing_type,
25158 lookup_type_die (vtype));
25159 }
25160 }
25161 else
25162 {
25163 add_AT_flag (type_die, DW_AT_declaration, 1);
25164
25165 /* We don't need to do this for function-local types. */
25166 if (TYPE_STUB_DECL (type)
25167 && ! decl_function_context (TYPE_STUB_DECL (type)))
25168 vec_safe_push (incomplete_types, type);
25169 }
25170
25171 if (get_AT (type_die, DW_AT_name))
25172 add_pubtype (type, type_die);
25173 }
25174
25175 /* Generate a DIE for a subroutine _type_. */
25176
25177 static void
25178 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25179 {
25180 tree return_type = TREE_TYPE (type);
25181 dw_die_ref subr_die
25182 = new_die (DW_TAG_subroutine_type,
25183 scope_die_for (type, context_die), type);
25184
25185 equate_type_number_to_die (type, subr_die);
25186 add_prototyped_attribute (subr_die, type);
25187 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25188 context_die);
25189 add_alignment_attribute (subr_die, type);
25190 gen_formal_types_die (type, subr_die);
25191
25192 if (get_AT (subr_die, DW_AT_name))
25193 add_pubtype (type, subr_die);
25194 if ((dwarf_version >= 5 || !dwarf_strict)
25195 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25196 add_AT_flag (subr_die, DW_AT_reference, 1);
25197 if ((dwarf_version >= 5 || !dwarf_strict)
25198 && lang_hooks.types.type_dwarf_attribute (type,
25199 DW_AT_rvalue_reference) != -1)
25200 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25201 }
25202
25203 /* Generate a DIE for a type definition. */
25204
25205 static void
25206 gen_typedef_die (tree decl, dw_die_ref context_die)
25207 {
25208 dw_die_ref type_die;
25209 tree type;
25210
25211 if (TREE_ASM_WRITTEN (decl))
25212 {
25213 if (DECL_ORIGINAL_TYPE (decl))
25214 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25215 return;
25216 }
25217
25218 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25219 checks in process_scope_var and modified_type_die), this should be called
25220 only for original types. */
25221 gcc_assert (decl_ultimate_origin (decl) == NULL
25222 || decl_ultimate_origin (decl) == decl);
25223
25224 TREE_ASM_WRITTEN (decl) = 1;
25225 type_die = new_die (DW_TAG_typedef, context_die, decl);
25226
25227 add_name_and_src_coords_attributes (type_die, decl);
25228 if (DECL_ORIGINAL_TYPE (decl))
25229 {
25230 type = DECL_ORIGINAL_TYPE (decl);
25231 if (type == error_mark_node)
25232 return;
25233
25234 gcc_assert (type != TREE_TYPE (decl));
25235 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25236 }
25237 else
25238 {
25239 type = TREE_TYPE (decl);
25240 if (type == error_mark_node)
25241 return;
25242
25243 if (is_naming_typedef_decl (TYPE_NAME (type)))
25244 {
25245 /* Here, we are in the case of decl being a typedef naming
25246 an anonymous type, e.g.:
25247 typedef struct {...} foo;
25248 In that case TREE_TYPE (decl) is not a typedef variant
25249 type and TYPE_NAME of the anonymous type is set to the
25250 TYPE_DECL of the typedef. This construct is emitted by
25251 the C++ FE.
25252
25253 TYPE is the anonymous struct named by the typedef
25254 DECL. As we need the DW_AT_type attribute of the
25255 DW_TAG_typedef to point to the DIE of TYPE, let's
25256 generate that DIE right away. add_type_attribute
25257 called below will then pick (via lookup_type_die) that
25258 anonymous struct DIE. */
25259 if (!TREE_ASM_WRITTEN (type))
25260 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25261
25262 /* This is a GNU Extension. We are adding a
25263 DW_AT_linkage_name attribute to the DIE of the
25264 anonymous struct TYPE. The value of that attribute
25265 is the name of the typedef decl naming the anonymous
25266 struct. This greatly eases the work of consumers of
25267 this debug info. */
25268 add_linkage_name_raw (lookup_type_die (type), decl);
25269 }
25270 }
25271
25272 add_type_attribute (type_die, type, decl_quals (decl), false,
25273 context_die);
25274
25275 if (is_naming_typedef_decl (decl))
25276 /* We want that all subsequent calls to lookup_type_die with
25277 TYPE in argument yield the DW_TAG_typedef we have just
25278 created. */
25279 equate_type_number_to_die (type, type_die);
25280
25281 add_alignment_attribute (type_die, TREE_TYPE (decl));
25282
25283 add_accessibility_attribute (type_die, decl);
25284
25285 if (DECL_ABSTRACT_P (decl))
25286 equate_decl_number_to_die (decl, type_die);
25287
25288 if (get_AT (type_die, DW_AT_name))
25289 add_pubtype (decl, type_die);
25290 }
25291
25292 /* Generate a DIE for a struct, class, enum or union type. */
25293
25294 static void
25295 gen_tagged_type_die (tree type,
25296 dw_die_ref context_die,
25297 enum debug_info_usage usage)
25298 {
25299 int need_pop;
25300
25301 if (type == NULL_TREE
25302 || !is_tagged_type (type))
25303 return;
25304
25305 if (TREE_ASM_WRITTEN (type))
25306 need_pop = 0;
25307 /* If this is a nested type whose containing class hasn't been written
25308 out yet, writing it out will cover this one, too. This does not apply
25309 to instantiations of member class templates; they need to be added to
25310 the containing class as they are generated. FIXME: This hurts the
25311 idea of combining type decls from multiple TUs, since we can't predict
25312 what set of template instantiations we'll get. */
25313 else if (TYPE_CONTEXT (type)
25314 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25315 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25316 {
25317 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25318
25319 if (TREE_ASM_WRITTEN (type))
25320 return;
25321
25322 /* If that failed, attach ourselves to the stub. */
25323 push_decl_scope (TYPE_CONTEXT (type));
25324 context_die = lookup_type_die (TYPE_CONTEXT (type));
25325 need_pop = 1;
25326 }
25327 else if (TYPE_CONTEXT (type) != NULL_TREE
25328 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25329 {
25330 /* If this type is local to a function that hasn't been written
25331 out yet, use a NULL context for now; it will be fixed up in
25332 decls_for_scope. */
25333 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25334 /* A declaration DIE doesn't count; nested types need to go in the
25335 specification. */
25336 if (context_die && is_declaration_die (context_die))
25337 context_die = NULL;
25338 need_pop = 0;
25339 }
25340 else
25341 {
25342 context_die = declare_in_namespace (type, context_die);
25343 need_pop = 0;
25344 }
25345
25346 if (TREE_CODE (type) == ENUMERAL_TYPE)
25347 {
25348 /* This might have been written out by the call to
25349 declare_in_namespace. */
25350 if (!TREE_ASM_WRITTEN (type))
25351 gen_enumeration_type_die (type, context_die);
25352 }
25353 else
25354 gen_struct_or_union_type_die (type, context_die, usage);
25355
25356 if (need_pop)
25357 pop_decl_scope ();
25358
25359 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25360 it up if it is ever completed. gen_*_type_die will set it for us
25361 when appropriate. */
25362 }
25363
25364 /* Generate a type description DIE. */
25365
25366 static void
25367 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25368 enum debug_info_usage usage)
25369 {
25370 struct array_descr_info info;
25371
25372 if (type == NULL_TREE || type == error_mark_node)
25373 return;
25374
25375 if (flag_checking && type)
25376 verify_type (type);
25377
25378 if (TYPE_NAME (type) != NULL_TREE
25379 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25380 && is_redundant_typedef (TYPE_NAME (type))
25381 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25382 /* The DECL of this type is a typedef we don't want to emit debug
25383 info for but we want debug info for its underlying typedef.
25384 This can happen for e.g, the injected-class-name of a C++
25385 type. */
25386 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25387
25388 /* If TYPE is a typedef type variant, let's generate debug info
25389 for the parent typedef which TYPE is a type of. */
25390 if (typedef_variant_p (type))
25391 {
25392 if (TREE_ASM_WRITTEN (type))
25393 return;
25394
25395 tree name = TYPE_NAME (type);
25396 tree origin = decl_ultimate_origin (name);
25397 if (origin != NULL && origin != name)
25398 {
25399 gen_decl_die (origin, NULL, NULL, context_die);
25400 return;
25401 }
25402
25403 /* Prevent broken recursion; we can't hand off to the same type. */
25404 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25405
25406 /* Give typedefs the right scope. */
25407 context_die = scope_die_for (type, context_die);
25408
25409 TREE_ASM_WRITTEN (type) = 1;
25410
25411 gen_decl_die (name, NULL, NULL, context_die);
25412 return;
25413 }
25414
25415 /* If type is an anonymous tagged type named by a typedef, let's
25416 generate debug info for the typedef. */
25417 if (is_naming_typedef_decl (TYPE_NAME (type)))
25418 {
25419 /* Use the DIE of the containing namespace as the parent DIE of
25420 the type description DIE we want to generate. */
25421 if (DECL_CONTEXT (TYPE_NAME (type))
25422 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
25423 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
25424
25425 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25426 return;
25427 }
25428
25429 if (lang_hooks.types.get_debug_type)
25430 {
25431 tree debug_type = lang_hooks.types.get_debug_type (type);
25432
25433 if (debug_type != NULL_TREE && debug_type != type)
25434 {
25435 gen_type_die_with_usage (debug_type, context_die, usage);
25436 return;
25437 }
25438 }
25439
25440 /* We are going to output a DIE to represent the unqualified version
25441 of this type (i.e. without any const or volatile qualifiers) so
25442 get the main variant (i.e. the unqualified version) of this type
25443 now. (Vectors and arrays are special because the debugging info is in the
25444 cloned type itself. Similarly function/method types can contain extra
25445 ref-qualification.) */
25446 if (TREE_CODE (type) == FUNCTION_TYPE
25447 || TREE_CODE (type) == METHOD_TYPE)
25448 {
25449 /* For function/method types, can't use type_main_variant here,
25450 because that can have different ref-qualifiers for C++,
25451 but try to canonicalize. */
25452 tree main = TYPE_MAIN_VARIANT (type);
25453 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25454 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25455 && check_base_type (t, main)
25456 && check_lang_type (t, type))
25457 {
25458 type = t;
25459 break;
25460 }
25461 }
25462 else if (TREE_CODE (type) != VECTOR_TYPE
25463 && TREE_CODE (type) != ARRAY_TYPE)
25464 type = type_main_variant (type);
25465
25466 /* If this is an array type with hidden descriptor, handle it first. */
25467 if (!TREE_ASM_WRITTEN (type)
25468 && lang_hooks.types.get_array_descr_info)
25469 {
25470 memset (&info, 0, sizeof (info));
25471 if (lang_hooks.types.get_array_descr_info (type, &info))
25472 {
25473 /* Fortran sometimes emits array types with no dimension. */
25474 gcc_assert (info.ndimensions >= 0
25475 && (info.ndimensions
25476 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25477 gen_descr_array_type_die (type, &info, context_die);
25478 TREE_ASM_WRITTEN (type) = 1;
25479 return;
25480 }
25481 }
25482
25483 if (TREE_ASM_WRITTEN (type))
25484 {
25485 /* Variable-length types may be incomplete even if
25486 TREE_ASM_WRITTEN. For such types, fall through to
25487 gen_array_type_die() and possibly fill in
25488 DW_AT_{upper,lower}_bound attributes. */
25489 if ((TREE_CODE (type) != ARRAY_TYPE
25490 && TREE_CODE (type) != RECORD_TYPE
25491 && TREE_CODE (type) != UNION_TYPE
25492 && TREE_CODE (type) != QUAL_UNION_TYPE)
25493 || !variably_modified_type_p (type, NULL))
25494 return;
25495 }
25496
25497 switch (TREE_CODE (type))
25498 {
25499 case ERROR_MARK:
25500 break;
25501
25502 case POINTER_TYPE:
25503 case REFERENCE_TYPE:
25504 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25505 ensures that the gen_type_die recursion will terminate even if the
25506 type is recursive. Recursive types are possible in Ada. */
25507 /* ??? We could perhaps do this for all types before the switch
25508 statement. */
25509 TREE_ASM_WRITTEN (type) = 1;
25510
25511 /* For these types, all that is required is that we output a DIE (or a
25512 set of DIEs) to represent the "basis" type. */
25513 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25514 DINFO_USAGE_IND_USE);
25515 break;
25516
25517 case OFFSET_TYPE:
25518 /* This code is used for C++ pointer-to-data-member types.
25519 Output a description of the relevant class type. */
25520 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25521 DINFO_USAGE_IND_USE);
25522
25523 /* Output a description of the type of the object pointed to. */
25524 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25525 DINFO_USAGE_IND_USE);
25526
25527 /* Now output a DIE to represent this pointer-to-data-member type
25528 itself. */
25529 gen_ptr_to_mbr_type_die (type, context_die);
25530 break;
25531
25532 case FUNCTION_TYPE:
25533 /* Force out return type (in case it wasn't forced out already). */
25534 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25535 DINFO_USAGE_DIR_USE);
25536 gen_subroutine_type_die (type, context_die);
25537 break;
25538
25539 case METHOD_TYPE:
25540 /* Force out return type (in case it wasn't forced out already). */
25541 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25542 DINFO_USAGE_DIR_USE);
25543 gen_subroutine_type_die (type, context_die);
25544 break;
25545
25546 case ARRAY_TYPE:
25547 case VECTOR_TYPE:
25548 gen_array_type_die (type, context_die);
25549 break;
25550
25551 case ENUMERAL_TYPE:
25552 case RECORD_TYPE:
25553 case UNION_TYPE:
25554 case QUAL_UNION_TYPE:
25555 gen_tagged_type_die (type, context_die, usage);
25556 return;
25557
25558 case VOID_TYPE:
25559 case INTEGER_TYPE:
25560 case REAL_TYPE:
25561 case FIXED_POINT_TYPE:
25562 case COMPLEX_TYPE:
25563 case BOOLEAN_TYPE:
25564 case POINTER_BOUNDS_TYPE:
25565 /* No DIEs needed for fundamental types. */
25566 break;
25567
25568 case NULLPTR_TYPE:
25569 case LANG_TYPE:
25570 /* Just use DW_TAG_unspecified_type. */
25571 {
25572 dw_die_ref type_die = lookup_type_die (type);
25573 if (type_die == NULL)
25574 {
25575 tree name = TYPE_IDENTIFIER (type);
25576 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25577 type);
25578 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25579 equate_type_number_to_die (type, type_die);
25580 }
25581 }
25582 break;
25583
25584 default:
25585 if (is_cxx_auto (type))
25586 {
25587 tree name = TYPE_IDENTIFIER (type);
25588 dw_die_ref *die = (name == get_identifier ("auto")
25589 ? &auto_die : &decltype_auto_die);
25590 if (!*die)
25591 {
25592 *die = new_die (DW_TAG_unspecified_type,
25593 comp_unit_die (), NULL_TREE);
25594 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25595 }
25596 equate_type_number_to_die (type, *die);
25597 break;
25598 }
25599 gcc_unreachable ();
25600 }
25601
25602 TREE_ASM_WRITTEN (type) = 1;
25603 }
25604
25605 static void
25606 gen_type_die (tree type, dw_die_ref context_die)
25607 {
25608 if (type != error_mark_node)
25609 {
25610 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25611 if (flag_checking)
25612 {
25613 dw_die_ref die = lookup_type_die (type);
25614 if (die)
25615 check_die (die);
25616 }
25617 }
25618 }
25619
25620 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25621 things which are local to the given block. */
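/* As a small sketch, a source-level block such as "{ int tmp; ... }"
inside a function normally yields a DW_TAG_lexical_block DIE (with the
block's address range) containing a DW_TAG_variable DIE for tmp, unless
the block is deemed insignificant, in which case only the DIEs for its
contents and sub-blocks are emitted. */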
25622
25623 static void
25624 gen_block_die (tree stmt, dw_die_ref context_die)
25625 {
25626 int must_output_die = 0;
25627 bool inlined_func;
25628
25629 /* Ignore blocks that are NULL. */
25630 if (stmt == NULL_TREE)
25631 return;
25632
25633 inlined_func = inlined_function_outer_scope_p (stmt);
25634
25635 /* If the block is one fragment of a non-contiguous block, do not
25636 process the variables, since they will have been done by the
25637 origin block. Do process subblocks. */
25638 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25639 {
25640 tree sub;
25641
25642 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25643 gen_block_die (sub, context_die);
25644
25645 return;
25646 }
25647
25648 /* Determine if we need to output any Dwarf DIEs at all to represent this
25649 block. */
25650 if (inlined_func)
25651 /* The outer scopes for inlinings *must* always be represented. We
25652 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25653 must_output_die = 1;
25654 else
25655 {
25656 /* Determine if this block directly contains any "significant"
25657 local declarations which we will need to output DIEs for. */
25658 if (debug_info_level > DINFO_LEVEL_TERSE)
25659 /* We are not in terse mode so *any* local declaration counts
25660 as being a "significant" one. */
25661 must_output_die = ((BLOCK_VARS (stmt) != NULL
25662 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25663 && (TREE_USED (stmt)
25664 || TREE_ASM_WRITTEN (stmt)
25665 || BLOCK_ABSTRACT (stmt)));
25666 else if ((TREE_USED (stmt)
25667 || TREE_ASM_WRITTEN (stmt)
25668 || BLOCK_ABSTRACT (stmt))
25669 && !dwarf2out_ignore_block (stmt))
25670 must_output_die = 1;
25671 }
25672
25673 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25674 DIE for any block which contains no significant local declarations at
25675 all. Rather, in such cases we just call `decls_for_scope' so that any
25676 needed Dwarf info for any sub-blocks will get properly generated. Note
25677 that in terse mode, our definition of what constitutes a "significant"
25678 local declaration gets restricted to include only inlined function
25679 instances and local (nested) function definitions. */
25680 if (must_output_die)
25681 {
25682 if (inlined_func)
25683 {
25684 /* If STMT block is abstract, that means we have been called
25685 indirectly from dwarf2out_abstract_function.
25686 That function rightfully marks the descendant blocks (of
25687 the abstract function it is dealing with) as being abstract,
25688 precisely to prevent us from emitting any
25689 DW_TAG_inlined_subroutine DIE as a descendant
25690 of an abstract function instance. So in that case, we should
25691 not call gen_inlined_subroutine_die.
25692
25693 Later though, when cgraph asks dwarf2out to emit info
25694 for the concrete instance of the function decl into which
25695 the concrete instance of STMT got inlined, the latter will lead
25696 to the generation of a DW_TAG_inlined_subroutine DIE. */
25697 if (! BLOCK_ABSTRACT (stmt))
25698 gen_inlined_subroutine_die (stmt, context_die);
25699 }
25700 else
25701 gen_lexical_block_die (stmt, context_die);
25702 }
25703 else
25704 decls_for_scope (stmt, context_die);
25705 }
25706
25707 /* Process variable DECL (or variable with origin ORIGIN) within
25708 block STMT and add it to CONTEXT_DIE. */
25709 static void
25710 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25711 {
25712 dw_die_ref die;
25713 tree decl_or_origin = decl ? decl : origin;
25714
25715 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25716 die = lookup_decl_die (decl_or_origin);
25717 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25718 {
25719 if (TYPE_DECL_IS_STUB (decl_or_origin))
25720 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25721 else
25722 die = lookup_decl_die (decl_or_origin);
25723 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25724 if (! die && ! early_dwarf)
25725 return;
25726 }
25727 else
25728 die = NULL;
25729
25730 /* Avoid creating DIEs for local typedefs and concrete static variables that
25731 will only be pruned later. */
25732 if ((origin || decl_ultimate_origin (decl))
25733 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25734 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25735 {
25736 origin = decl_ultimate_origin (decl_or_origin);
25737 if (decl && VAR_P (decl) && die != NULL)
25738 {
25739 die = lookup_decl_die (origin);
25740 if (die != NULL)
25741 equate_decl_number_to_die (decl, die);
25742 }
25743 return;
25744 }
25745
25746 if (die != NULL && die->die_parent == NULL)
25747 add_child_die (context_die, die);
25748 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25749 {
25750 if (early_dwarf)
25751 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25752 stmt, context_die);
25753 }
25754 else
25755 {
25756 if (decl && DECL_P (decl))
25757 {
25758 die = lookup_decl_die (decl);
25759
25760 /* Early created DIEs do not have a parent as the decls refer
25761 to the function as DECL_CONTEXT rather than the BLOCK. */
25762 if (die && die->die_parent == NULL)
25763 {
25764 gcc_assert (in_lto_p);
25765 add_child_die (context_die, die);
25766 }
25767 }
25768
25769 gen_decl_die (decl, origin, NULL, context_die);
25770 }
25771 }
25772
25773 /* Generate all of the decls declared within a given scope and (recursively)
25774 all of its sub-blocks. */
25775
25776 static void
25777 decls_for_scope (tree stmt, dw_die_ref context_die)
25778 {
25779 tree decl;
25780 unsigned int i;
25781 tree subblocks;
25782
25783 /* Ignore NULL blocks. */
25784 if (stmt == NULL_TREE)
25785 return;
25786
25787 /* Output the DIEs to represent all of the data objects and typedefs
25788 declared directly within this block but not within any nested
25789 sub-blocks. Also, nested function and tag DIEs have been
25790 generated with a parent of NULL; fix that up now. We don't
25791 have to do this if we're at -g1. */
25792 if (debug_info_level > DINFO_LEVEL_TERSE)
25793 {
25794 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25795 process_scope_var (stmt, decl, NULL_TREE, context_die);
25796 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25797 origin; avoid doing this twice, as we have no good way to see
25798 if we've done it once already. */
25799 if (! early_dwarf)
25800 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25801 {
25802 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25803 if (decl == current_function_decl)
25804 /* Ignore declarations of the current function: although they
25805 are only declarations, gen_subprogram_die would treat them
25806 as definitions again because they are equal to
25807 current_function_decl, and we would recurse endlessly. */;
25808 else if (TREE_CODE (decl) == FUNCTION_DECL)
25809 process_scope_var (stmt, decl, NULL_TREE, context_die);
25810 else
25811 process_scope_var (stmt, NULL_TREE, decl, context_die);
25812 }
25813 }
25814
25815 /* Even if we're at -g1, we need to process the subblocks in order to get
25816 inlined call information. */
25817
25818 /* Output the DIEs to represent all sub-blocks (and the items declared
25819 therein) of this block. */
25820 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25821 subblocks != NULL;
25822 subblocks = BLOCK_CHAIN (subblocks))
25823 gen_block_die (subblocks, context_die);
25824 }
25825
25826 /* Is this a typedef we can avoid emitting? */
25827
25828 bool
25829 is_redundant_typedef (const_tree decl)
25830 {
25831 if (TYPE_DECL_IS_STUB (decl))
25832 return true;
25833
25834 if (DECL_ARTIFICIAL (decl)
25835 && DECL_CONTEXT (decl)
25836 && is_tagged_type (DECL_CONTEXT (decl))
25837 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25838 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25839 /* Also ignore the artificial member typedef for the class name. */
25840 return true;
25841
25842 return false;
25843 }
25844
25845 /* Return TRUE if DECL is a typedef that names a type for linkage
25846 purposes. Such typedefs are produced by the C++ FE for
25847 constructs like:
25848
25849 typedef struct {...} foo;
25850
25851 In that case, there is no typedef variant type produced for foo.
25852 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25853 struct type. */
25854
25855 static bool
25856 is_naming_typedef_decl (const_tree decl)
25857 {
25858 if (decl == NULL_TREE
25859 || TREE_CODE (decl) != TYPE_DECL
25860 || DECL_NAMELESS (decl)
25861 || !is_tagged_type (TREE_TYPE (decl))
25862 || DECL_IS_BUILTIN (decl)
25863 || is_redundant_typedef (decl)
25864 /* It looks like Ada produces TYPE_DECLs that are very similar
25865 to C++ naming typedefs but that have different
25866 semantics. Let's be specific to C++ for now. */
25867 || !is_cxx (decl))
25868 return FALSE;
25869
25870 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25871 && TYPE_NAME (TREE_TYPE (decl)) == decl
25872 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25873 != TYPE_NAME (TREE_TYPE (decl))));
25874 }
25875
25876 /* Looks up the DIE for a context; returns NULL if none exists yet. */
25877
25878 static inline dw_die_ref
25879 lookup_context_die (tree context)
25880 {
25881 if (context)
25882 {
25883 /* Find die that represents this context. */
25884 if (TYPE_P (context))
25885 {
25886 context = TYPE_MAIN_VARIANT (context);
25887 dw_die_ref ctx = lookup_type_die (context);
25888 if (!ctx)
25889 return NULL;
25890 return strip_naming_typedef (context, ctx);
25891 }
25892 else
25893 return lookup_decl_die (context);
25894 }
25895 return comp_unit_die ();
25896 }
25897
25898 /* Returns the DIE for a context. */
25899
25900 static inline dw_die_ref
25901 get_context_die (tree context)
25902 {
25903 if (context)
25904 {
25905 /* Find die that represents this context. */
25906 if (TYPE_P (context))
25907 {
25908 context = TYPE_MAIN_VARIANT (context);
25909 return strip_naming_typedef (context, force_type_die (context));
25910 }
25911 else
25912 return force_decl_die (context);
25913 }
25914 return comp_unit_die ();
25915 }
25916
25917 /* Returns the DIE for decl. A DIE will always be returned. */
25918
25919 static dw_die_ref
25920 force_decl_die (tree decl)
25921 {
25922 dw_die_ref decl_die;
25923 unsigned saved_external_flag;
25924 tree save_fn = NULL_TREE;
25925 decl_die = lookup_decl_die (decl);
25926 if (!decl_die)
25927 {
25928 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25929
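/* Forcing out the context (e.g. the class containing DECL) may already
   have created DECL's DIE as a side effect, so check again before
   generating one ourselves.  */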
25930 decl_die = lookup_decl_die (decl);
25931 if (decl_die)
25932 return decl_die;
25933
25934 switch (TREE_CODE (decl))
25935 {
25936 case FUNCTION_DECL:
25937 /* Clear current_function_decl, so that gen_subprogram_die thinks
25938 that this is a declaration. At this point, we just want to force
25939 a declaration DIE. */
25940 save_fn = current_function_decl;
25941 current_function_decl = NULL_TREE;
25942 gen_subprogram_die (decl, context_die);
25943 current_function_decl = save_fn;
25944 break;
25945
25946 case VAR_DECL:
25947 /* Set the external flag to force a declaration DIE. Restore it after
25948 the call to gen_decl_die. */
25949 saved_external_flag = DECL_EXTERNAL (decl);
25950 DECL_EXTERNAL (decl) = 1;
25951 gen_decl_die (decl, NULL, NULL, context_die);
25952 DECL_EXTERNAL (decl) = saved_external_flag;
25953 break;
25954
25955 case NAMESPACE_DECL:
25956 if (dwarf_version >= 3 || !dwarf_strict)
25957 dwarf2out_decl (decl);
25958 else
25959 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25960 decl_die = comp_unit_die ();
25961 break;
25962
25963 case TRANSLATION_UNIT_DECL:
25964 decl_die = comp_unit_die ();
25965 break;
25966
25967 default:
25968 gcc_unreachable ();
25969 }
25970
25971 /* We should be able to find the DIE now. */
25972 if (!decl_die)
25973 decl_die = lookup_decl_die (decl);
25974 gcc_assert (decl_die);
25975 }
25976
25977 return decl_die;
25978 }
25979
25980 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
25981 always returned. */
25982
25983 static dw_die_ref
25984 force_type_die (tree type)
25985 {
25986 dw_die_ref type_die;
25987
25988 type_die = lookup_type_die (type);
25989 if (!type_die)
25990 {
25991 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25992
25993 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25994 false, context_die);
25995 gcc_assert (type_die);
25996 }
25997 return type_die;
25998 }
25999
26000 /* Force out any required namespaces to be able to output DECL,
26001 and return the new context_die for it, if it's changed. */
26002
26003 static dw_die_ref
26004 setup_namespace_context (tree thing, dw_die_ref context_die)
26005 {
26006 tree context = (DECL_P (thing)
26007 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26008 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26009 /* Force out the namespace. */
26010 context_die = force_decl_die (context);
26011
26012 return context_die;
26013 }
26014
26015 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26016 type) within its namespace, if appropriate.
26017
26018 For compatibility with older debuggers, namespace DIEs only contain
26019 declarations; all definitions are emitted at CU scope, with
26020 DW_AT_specification pointing to the declaration (like with class
26021 members). */
26022
26023 static dw_die_ref
26024 declare_in_namespace (tree thing, dw_die_ref context_die)
26025 {
26026 dw_die_ref ns_context;
26027
26028 if (debug_info_level <= DINFO_LEVEL_TERSE)
26029 return context_die;
26030
26031 /* External declarations in the local scope only need to be emitted
26032 once, not once in the namespace and once in the scope.
26033
26034 This avoids declaring the `extern' below in the
26035 namespace DIE as well as in the innermost scope:
26036
26037 namespace S
26038 {
26039 int i=5;
26040 int foo()
26041 {
26042 int i=8;
26043 extern int i;
26044 return i;
26045 }
26046 }
26047 */
26048 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26049 return context_die;
26050
26051 /* If this decl is from an inlined function, then don't try to emit it in its
26052 namespace, as we will get confused. It would have already been emitted
26053 when the abstract instance of the inline function was emitted anyway. */
26054 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26055 return context_die;
26056
26057 ns_context = setup_namespace_context (thing, context_die);
26058
26059 if (ns_context != context_die)
26060 {
26061 if (is_fortran ())
26062 return ns_context;
26063 if (DECL_P (thing))
26064 gen_decl_die (thing, NULL, NULL, ns_context);
26065 else
26066 gen_type_die (thing, ns_context);
26067 }
26068 return context_die;
26069 }
26070
26071 /* Generate a DIE for a namespace or namespace alias. */
26072
26073 static void
26074 gen_namespace_die (tree decl, dw_die_ref context_die)
26075 {
26076 dw_die_ref namespace_die;
26077
26078 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26079 they are an alias of. */
26080 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26081 {
26082 /* Output a real namespace or module. */
26083 context_die = setup_namespace_context (decl, comp_unit_die ());
26084 namespace_die = new_die (is_fortran ()
26085 ? DW_TAG_module : DW_TAG_namespace,
26086 context_die, decl);
26087 /* For Fortran modules defined in a different CU, don't add src coords. */
26088 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26089 {
26090 const char *name = dwarf2_name (decl, 0);
26091 if (name)
26092 add_name_attribute (namespace_die, name);
26093 }
26094 else
26095 add_name_and_src_coords_attributes (namespace_die, decl);
26096 if (DECL_EXTERNAL (decl))
26097 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26098 equate_decl_number_to_die (decl, namespace_die);
26099 }
26100 else
26101 {
26102 /* Output a namespace alias. */
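/* In C++ this is 'namespace A = N;': DECL is the alias A and
   DECL_ABSTRACT_ORIGIN is the namespace N it renames. The alias is
   emitted as a DW_TAG_imported_declaration whose DW_AT_import refers
   to N's DIE.  */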
26103
26104 /* Force out the namespace we are an alias of, if necessary. */
26105 dw_die_ref origin_die
26106 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26107
26108 if (DECL_FILE_SCOPE_P (decl)
26109 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26110 context_die = setup_namespace_context (decl, comp_unit_die ());
26111 /* Now create the namespace alias DIE. */
26112 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26113 add_name_and_src_coords_attributes (namespace_die, decl);
26114 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26115 equate_decl_number_to_die (decl, namespace_die);
26116 }
26117 if ((dwarf_version >= 5 || !dwarf_strict)
26118 && lang_hooks.decls.decl_dwarf_attribute (decl,
26119 DW_AT_export_symbols) == 1)
26120 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26121
26122 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26123 if (want_pubnames ())
26124 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26125 }
26126
26127 /* Generate Dwarf debug information for a decl described by DECL.
26128 The return value is currently only meaningful for PARM_DECLs;
26129 for all other decls it returns NULL.
26130
26131 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26132 It can be NULL otherwise. */
26133
26134 static dw_die_ref
26135 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26136 dw_die_ref context_die)
26137 {
26138 tree decl_or_origin = decl ? decl : origin;
26139 tree class_origin = NULL, ultimate_origin;
26140
26141 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26142 return NULL;
26143
26144 /* Ignore pointer bounds decls. */
26145 if (DECL_P (decl_or_origin)
26146 && TREE_TYPE (decl_or_origin)
26147 && POINTER_BOUNDS_P (decl_or_origin))
26148 return NULL;
26149
26150 switch (TREE_CODE (decl_or_origin))
26151 {
26152 case ERROR_MARK:
26153 break;
26154
26155 case CONST_DECL:
26156 if (!is_fortran () && !is_ada ())
26157 {
26158 /* The individual enumerators of an enum type get output when we output
26159 the Dwarf representation of the relevant enum type itself. */
26160 break;
26161 }
26162
26163 /* Emit its type. */
26164 gen_type_die (TREE_TYPE (decl), context_die);
26165
26166 /* And its containing namespace. */
26167 context_die = declare_in_namespace (decl, context_die);
26168
26169 gen_const_die (decl, context_die);
26170 break;
26171
26172 case FUNCTION_DECL:
26173 #if 0
26174 /* FIXME */
26175 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26176 on local redeclarations of global functions. That seems broken. */
26177 if (current_function_decl != decl)
26178 /* This is only a declaration. */;
26179 #endif
26180
26181 /* We should have abstract copies already and should not generate
26182 stray type DIEs in late LTO dumping. */
26183 if (! early_dwarf)
26184 ;
26185
26186 /* If we're emitting a clone, emit info for the abstract instance. */
26187 else if (origin || DECL_ORIGIN (decl) != decl)
26188 dwarf2out_abstract_function (origin
26189 ? DECL_ORIGIN (origin)
26190 : DECL_ABSTRACT_ORIGIN (decl));
26191
26192 /* If we're emitting a possibly inlined function, emit it as
26193 an abstract instance. */
26194 else if (cgraph_function_possibly_inlined_p (decl)
26195 && ! DECL_ABSTRACT_P (decl)
26196 && ! class_or_namespace_scope_p (context_die)
26197 /* dwarf2out_abstract_function won't emit a die if this is just
26198 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26199 that case, because that works only if we have a die. */
26200 && DECL_INITIAL (decl) != NULL_TREE)
26201 dwarf2out_abstract_function (decl);
26202
26203 /* Otherwise we're emitting the primary DIE for this decl. */
26204 else if (debug_info_level > DINFO_LEVEL_TERSE)
26205 {
26206 /* Before we describe the FUNCTION_DECL itself, make sure that we
26207 have its containing type. */
26208 if (!origin)
26209 origin = decl_class_context (decl);
26210 if (origin != NULL_TREE)
26211 gen_type_die (origin, context_die);
26212
26213 /* And its return type. */
26214 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26215
26216 /* And its virtual context. */
26217 if (DECL_VINDEX (decl) != NULL_TREE)
26218 gen_type_die (DECL_CONTEXT (decl), context_die);
26219
26220 /* Make sure we have a member DIE for decl. */
26221 if (origin != NULL_TREE)
26222 gen_type_die_for_member (origin, decl, context_die);
26223
26224 /* And its containing namespace. */
26225 context_die = declare_in_namespace (decl, context_die);
26226 }
26227
26228 /* Now output a DIE to represent the function itself. */
26229 if (decl)
26230 gen_subprogram_die (decl, context_die);
26231 break;
26232
26233 case TYPE_DECL:
26234 /* If we are in terse mode, don't generate any DIEs to represent any
26235 actual typedefs. */
26236 if (debug_info_level <= DINFO_LEVEL_TERSE)
26237 break;
26238
26239 /* In the special case of a TYPE_DECL node representing the declaration
26240 of some type tag, if the given TYPE_DECL is marked as having been
26241 instantiated from some other (original) TYPE_DECL node (e.g. one which
26242 was generated within the original definition of an inline function) we
26243 used to generate a special (abbreviated) DW_TAG_structure_type,
26244 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26245 should actually be referencing those DIEs, as variable DIEs with that
26246 type would be emitted already in the abstract origin, so it was always
26247 removed during unused type pruning. Don't add anything in this
26248 case. */
26249 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26250 break;
26251
26252 if (is_redundant_typedef (decl))
26253 gen_type_die (TREE_TYPE (decl), context_die);
26254 else
26255 /* Output a DIE to represent the typedef itself. */
26256 gen_typedef_die (decl, context_die);
26257 break;
26258
26259 case LABEL_DECL:
26260 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26261 gen_label_die (decl, context_die);
26262 break;
26263
26264 case VAR_DECL:
26265 case RESULT_DECL:
26266 /* If we are in terse mode, don't generate any DIEs to represent any
26267 variable declarations or definitions. */
26268 if (debug_info_level <= DINFO_LEVEL_TERSE)
26269 break;
26270
26271 /* Avoid generating stray type DIEs during late dwarf dumping.
26272 All types have been dumped early. */
26273 if (early_dwarf
26274 /* ??? But in LTRANS we cannot annotate early created variably
26275 modified type DIEs without copying them and adjusting all
26276 references to them. Dump them again as happens for inlining
26277 which copies both the decl and the types. */
26278 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26279 in VLA bound information for example. */
26280 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26281 current_function_decl)))
26282 {
26283 /* Output any DIEs that are needed to specify the type of this data
26284 object. */
26285 if (decl_by_reference_p (decl_or_origin))
26286 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26287 else
26288 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26289 }
26290
26291 if (early_dwarf)
26292 {
26293 /* And its containing type. */
26294 class_origin = decl_class_context (decl_or_origin);
26295 if (class_origin != NULL_TREE)
26296 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26297
26298 /* And its containing namespace. */
26299 context_die = declare_in_namespace (decl_or_origin, context_die);
26300 }
26301
26302 /* Now output the DIE to represent the data object itself. This gets
26303 complicated because of the possibility that the VAR_DECL really
26304 represents an inlined instance of a formal parameter for an inline
26305 function. */
26306 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26307 if (ultimate_origin != NULL_TREE
26308 && TREE_CODE (ultimate_origin) == PARM_DECL)
26309 gen_formal_parameter_die (decl, origin,
26310 true /* Emit name attribute. */,
26311 context_die);
26312 else
26313 gen_variable_die (decl, origin, context_die);
26314 break;
26315
26316 case FIELD_DECL:
26317 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26318 /* Ignore the nameless fields that are used to skip bits but handle C++
26319 anonymous unions and structs. */
26320 if (DECL_NAME (decl) != NULL_TREE
26321 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26322 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26323 {
26324 gen_type_die (member_declared_type (decl), context_die);
26325 gen_field_die (decl, ctx, context_die);
26326 }
26327 break;
26328
26329 case PARM_DECL:
26330 /* Avoid generating stray type DIEs during late dwarf dumping.
26331 All types have been dumped early. */
26332 if (early_dwarf
26333 /* ??? But in LTRANS we cannot annotate early created variably
26334 modified type DIEs without copying them and adjusting all
26335 references to them. Dump them again as happens for inlining
26336 which copies both the decl and the types. */
26337 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26338 in VLA bound information for example. */
26339 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26340 current_function_decl)))
26341 {
26342 if (DECL_BY_REFERENCE (decl_or_origin))
26343 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26344 else
26345 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26346 }
26347 return gen_formal_parameter_die (decl, origin,
26348 true /* Emit name attribute. */,
26349 context_die);
26350
26351 case NAMESPACE_DECL:
26352 if (dwarf_version >= 3 || !dwarf_strict)
26353 gen_namespace_die (decl, context_die);
26354 break;
26355
26356 case IMPORTED_DECL:
26357 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26358 DECL_CONTEXT (decl), context_die);
26359 break;
26360
26361 case NAMELIST_DECL:
26362 gen_namelist_decl (DECL_NAME (decl), context_die,
26363 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26364 break;
26365
26366 default:
26367 /* Probably some frontend-internal decl. Assume we don't care. */
26368 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26369 break;
26370 }
26371
26372 return NULL;
26373 }
26374 \f
26375 /* Output initial debug information for global DECL. Called at the
26376 end of the parsing process.
26377
26378 This is the initial debug generation process. As such, the DIEs
26379 generated may be incomplete. A later debug generation pass
26380 (dwarf2out_late_global_decl) will augment the information generated
26381 in this pass (e.g., with complete location info). */
26382
26383 static void
26384 dwarf2out_early_global_decl (tree decl)
26385 {
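/* set_early_dwarf is an RAII-style helper that turns on the early_dwarf
   flag for the rest of this function and restores it on return.  */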
26386 set_early_dwarf s;
26387
26388 /* gen_decl_die() will set DECL_ABSTRACT because
26389 cgraph_function_possibly_inlined_p() returns true. This in
26390 turn will cause DW_AT_inline attributes to be set.
26391
26392 This happens because at early dwarf generation, there is no
26393 cgraph information, causing cgraph_function_possibly_inlined_p()
26394 to return true. Trick cgraph_function_possibly_inlined_p()
26395 while we generate dwarf early. */
26396 bool save = symtab->global_info_ready;
26397 symtab->global_info_ready = true;
26398
26399 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26400 other DECLs and they can point to template types or other things
26401 that dwarf2out can't handle when done via dwarf2out_decl. */
26402 if (TREE_CODE (decl) != TYPE_DECL
26403 && TREE_CODE (decl) != PARM_DECL)
26404 {
26405 if (TREE_CODE (decl) == FUNCTION_DECL)
26406 {
26407 tree save_fndecl = current_function_decl;
26408
26409 /* For nested functions, make sure we have DIEs for the parents first
26410 so that all nested DIEs are generated at the proper scope in the
26411 first shot. */
26412 tree context = decl_function_context (decl);
26413 if (context != NULL)
26414 {
26415 dw_die_ref context_die = lookup_decl_die (context);
26416 current_function_decl = context;
26417
26418 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26419 enough so that it lands in its own context. This avoids type
26420 pruning issues later on. */
26421 if (context_die == NULL || is_declaration_die (context_die))
26422 dwarf2out_decl (context);
26423 }
26424
26425 /* Emit an abstract origin of a function first. This happens
26426 with C++ constructor clones for example and makes
26427 dwarf2out_abstract_function happy which requires the early
26428 DIE of the abstract instance to be present. */
26429 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26430 dw_die_ref origin_die;
26431 if (origin != NULL
26432 /* Do not emit the DIE multiple times but make sure to
26433 process it fully here in case we just saw a declaration. */
26434 && ((origin_die = lookup_decl_die (origin)) == NULL
26435 || is_declaration_die (origin_die)))
26436 {
26437 current_function_decl = origin;
26438 dwarf2out_decl (origin);
26439 }
26440
26441 /* Emit the DIE for decl but avoid doing that multiple times. */
26442 dw_die_ref old_die;
26443 if ((old_die = lookup_decl_die (decl)) == NULL
26444 || is_declaration_die (old_die))
26445 {
26446 current_function_decl = decl;
26447 dwarf2out_decl (decl);
26448 }
26449
26450 current_function_decl = save_fndecl;
26451 }
26452 else
26453 dwarf2out_decl (decl);
26454 }
26455 symtab->global_info_ready = save;
26456 }
26457
26458 /* Return whether EXPR is an expression with the following pattern:
26459 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
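/* At the source level this is a dereference of a constant address,
   roughly '*(int *) 0x1000'.  */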
26460
26461 static bool
26462 is_trivial_indirect_ref (tree expr)
26463 {
26464 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26465 return false;
26466
26467 tree nop = TREE_OPERAND (expr, 0);
26468 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26469 return false;
26470
26471 tree int_cst = TREE_OPERAND (nop, 0);
26472 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26473 }
26474
26475 /* Output debug information for global decl DECL. Called from
26476 toplev.c after compilation proper has finished. */
26477
26478 static void
26479 dwarf2out_late_global_decl (tree decl)
26480 {
26481 /* Fill-in any location information we were unable to determine
26482 on the first pass. */
26483 if (VAR_P (decl) && !POINTER_BOUNDS_P (decl))
26484 {
26485 dw_die_ref die = lookup_decl_die (decl);
26486
26487 /* We may have to generate early debug late for LTO in case debug
26488 was not enabled at compile-time or the target doesn't support
26489 the LTO early debug scheme. */
26490 if (! die && in_lto_p)
26491 {
26492 dwarf2out_decl (decl);
26493 die = lookup_decl_die (decl);
26494 }
26495
26496 if (die)
26497 {
26498 /* We get called via the symtab code invoking late_global_decl
26499 for symbols that are optimized out.
26500
26501 Do not add locations for those, except if they have a
26502 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26503 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26504 INDIRECT_REF expression, as this could generate relocations to
26505 text symbols in LTO object files, which is invalid. */
26506 varpool_node *node = varpool_node::get (decl);
26507 if ((! node || ! node->definition)
26508 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26509 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26510 tree_add_const_value_attribute_for_decl (die, decl);
26511 else
26512 add_location_or_const_value_attribute (die, decl, false);
26513 }
26514 }
26515 }
26516
26517 /* Output debug information for type decl DECL. Called from toplev.c
26518 and from language front ends (to record built-in types). */
26519 static void
26520 dwarf2out_type_decl (tree decl, int local)
26521 {
26522 if (!local)
26523 {
26524 set_early_dwarf s;
26525 dwarf2out_decl (decl);
26526 }
26527 }
26528
26529 /* Output debug information for imported module or decl DECL.
26530 NAME is non-NULL name in the lexical block if the decl has been renamed.
26531 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26532 that DECL belongs to.
26533 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26534 static void
26535 dwarf2out_imported_module_or_decl_1 (tree decl,
26536 tree name,
26537 tree lexical_block,
26538 dw_die_ref lexical_block_die)
26539 {
26540 expanded_location xloc;
26541 dw_die_ref imported_die = NULL;
26542 dw_die_ref at_import_die;
26543
26544 if (TREE_CODE (decl) == IMPORTED_DECL)
26545 {
26546 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26547 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26548 gcc_assert (decl);
26549 }
26550 else
26551 xloc = expand_location (input_location);
26552
26553 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26554 {
26555 at_import_die = force_type_die (TREE_TYPE (decl));
26556 /* For namespace N { typedef void T; } using N::T; base_type_die
26557 returns NULL, but DW_TAG_imported_declaration requires
26558 the DW_AT_import tag. Force creation of DW_TAG_typedef. */
26559 if (!at_import_die)
26560 {
26561 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26562 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26563 at_import_die = lookup_type_die (TREE_TYPE (decl));
26564 gcc_assert (at_import_die);
26565 }
26566 }
26567 else
26568 {
26569 at_import_die = lookup_decl_die (decl);
26570 if (!at_import_die)
26571 {
26572 /* If we're trying to avoid duplicate debug info, we may not have
26573 emitted the member decl for this field. Emit it now. */
26574 if (TREE_CODE (decl) == FIELD_DECL)
26575 {
26576 tree type = DECL_CONTEXT (decl);
26577
26578 if (TYPE_CONTEXT (type)
26579 && TYPE_P (TYPE_CONTEXT (type))
26580 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26581 DINFO_USAGE_DIR_USE))
26582 return;
26583 gen_type_die_for_member (type, decl,
26584 get_context_die (TYPE_CONTEXT (type)));
26585 }
26586 if (TREE_CODE (decl) == NAMELIST_DECL)
26587 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26588 get_context_die (DECL_CONTEXT (decl)),
26589 NULL_TREE);
26590 else
26591 at_import_die = force_decl_die (decl);
26592 }
26593 }
26594
26595 if (TREE_CODE (decl) == NAMESPACE_DECL)
26596 {
26597 if (dwarf_version >= 3 || !dwarf_strict)
26598 imported_die = new_die (DW_TAG_imported_module,
26599 lexical_block_die,
26600 lexical_block);
26601 else
26602 return;
26603 }
26604 else
26605 imported_die = new_die (DW_TAG_imported_declaration,
26606 lexical_block_die,
26607 lexical_block);
26608
26609 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26610 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26611 if (debug_column_info && xloc.column)
26612 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26613 if (name)
26614 add_AT_string (imported_die, DW_AT_name,
26615 IDENTIFIER_POINTER (name));
26616 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26617 }
26618
26619 /* Output debug information for imported module or decl DECL.
26620 NAME is non-NULL name in context if the decl has been renamed.
26621 CHILD is true if decl is one of the renamed decls as part of
26622 importing whole module.
26623 IMPLICIT is set if this hook is called for an implicit import
26624 such as inline namespace. */
26625
26626 static void
26627 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26628 bool child, bool implicit)
26629 {
26630 /* dw_die_ref at_import_die; */
26631 dw_die_ref scope_die;
26632
26633 if (debug_info_level <= DINFO_LEVEL_TERSE)
26634 return;
26635
26636 gcc_assert (decl);
26637
26638 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26639 should be enough. For DWARF4 and older, even if we emit
26640 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26641 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
26642 if (implicit
26643 && dwarf_version >= 5
26644 && lang_hooks.decls.decl_dwarf_attribute (decl,
26645 DW_AT_export_symbols) == 1)
26646 return;
26647
26648 set_early_dwarf s;
26649
26650 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need two
26651 DIEs: the decl's DIE for the DW_AT_import reference, and the scope DIE.
26652 First, get the DIE for the decl itself. */
26653
26654 /* Get the scope DIE for the decl's context. Use comp_unit_die for a global
26655 module or decl. If no DIE is found for a non-global, force a new one. */
26656 if (context
26657 && TYPE_P (context)
26658 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26659 return;
26660
26661 scope_die = get_context_die (context);
26662
26663 if (child)
26664 {
26665 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26666 there is nothing we can do here. */
26667 if (dwarf_version < 3 && dwarf_strict)
26668 return;
26669
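/* DECL is one of the renamed decls of a whole-module import; nest it in
   the DW_TAG_imported_module DIE that was just added to SCOPE_DIE, which
   the asserts below expect to be the newest child.  */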
26670 gcc_assert (scope_die->die_child);
26671 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26672 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26673 scope_die = scope_die->die_child;
26674 }
26675
26676 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26677 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26678 }
26679
26680 /* Output debug information for namelists. */
26681
26682 static dw_die_ref
26683 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26684 {
26685 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26686 tree value;
26687 unsigned i;
26688
26689 if (debug_info_level <= DINFO_LEVEL_TERSE)
26690 return NULL;
26691
26692 gcc_assert (scope_die != NULL);
26693 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26694 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26695
26696 /* If there are no item_decls, we have a nondefining namelist, e.g.
26697 with USE association; hence, set DW_AT_declaration. */
26698 if (item_decls == NULL_TREE)
26699 {
26700 add_AT_flag (nml_die, DW_AT_declaration, 1);
26701 return nml_die;
26702 }
26703
26704 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26705 {
26706 nml_item_ref_die = lookup_decl_die (value);
26707 if (!nml_item_ref_die)
26708 nml_item_ref_die = force_decl_die (value);
26709
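/* Each namelist member gets a DW_TAG_namelist_item child whose
   DW_AT_namelist_items attribute refers to the member's own DIE.  */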
26710 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26711 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26712 }
26713 return nml_die;
26714 }
26715
26716
26717 /* Write the debugging output for DECL, creating the needed DIEs. */
26718
26719 static void
26720 dwarf2out_decl (tree decl)
26721 {
26722 dw_die_ref context_die = comp_unit_die ();
26723
26724 switch (TREE_CODE (decl))
26725 {
26726 case ERROR_MARK:
26727 return;
26728
26729 case FUNCTION_DECL:
26730 /* If we're a nested function, initially use a parent of NULL; if we're
26731 a plain function, this will be fixed up in decls_for_scope. If
26732 we're a method, it will be ignored, since we already have a DIE. */
26733 if (decl_function_context (decl)
26734 /* But if we're in terse mode, we don't care about scope. */
26735 && debug_info_level > DINFO_LEVEL_TERSE)
26736 context_die = NULL;
26737 break;
26738
26739 case VAR_DECL:
26740 /* For local statics, look up the proper context DIE. */
26741 if (local_function_static (decl))
26742 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26743
26744 /* If we are in terse mode, don't generate any DIEs to represent any
26745 variable declarations or definitions. */
26746 if (debug_info_level <= DINFO_LEVEL_TERSE)
26747 return;
26748 break;
26749
26750 case CONST_DECL:
26751 if (debug_info_level <= DINFO_LEVEL_TERSE)
26752 return;
26753 if (!is_fortran () && !is_ada ())
26754 return;
26755 if (TREE_STATIC (decl) && decl_function_context (decl))
26756 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26757 break;
26758
26759 case NAMESPACE_DECL:
26760 case IMPORTED_DECL:
26761 if (debug_info_level <= DINFO_LEVEL_TERSE)
26762 return;
26763 if (lookup_decl_die (decl) != NULL)
26764 return;
26765 break;
26766
26767 case TYPE_DECL:
26768 /* Don't emit stubs for types unless they are needed by other DIEs. */
26769 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26770 return;
26771
26772 /* Don't bother trying to generate any DIEs to represent any of the
26773 normal built-in types for the language we are compiling. */
26774 if (DECL_IS_BUILTIN (decl))
26775 return;
26776
26777 /* If we are in terse mode, don't generate any DIEs for types. */
26778 if (debug_info_level <= DINFO_LEVEL_TERSE)
26779 return;
26780
26781 /* If we're a function-scope tag, initially use a parent of NULL;
26782 this will be fixed up in decls_for_scope. */
26783 if (decl_function_context (decl))
26784 context_die = NULL;
26785
26786 break;
26787
26788 case NAMELIST_DECL:
26789 break;
26790
26791 default:
26792 return;
26793 }
26794
26795 gen_decl_die (decl, NULL, NULL, context_die);
26796
26797 if (flag_checking)
26798 {
26799 dw_die_ref die = lookup_decl_die (decl);
26800 if (die)
26801 check_die (die);
26802 }
26803 }
26804
26805 /* Write the debugging output for DECL. */
26806
26807 static void
26808 dwarf2out_function_decl (tree decl)
26809 {
26810 dwarf2out_decl (decl);
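/* Now reset the per-function state (call site counts and the variable
   location tables) accumulated while scanning this function.  */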
26811 call_arg_locations = NULL;
26812 call_arg_loc_last = NULL;
26813 call_site_count = -1;
26814 tail_call_site_count = -1;
26815 decl_loc_table->empty ();
26816 cached_dw_loc_list_table->empty ();
26817 }
26818
26819 /* Output a marker (i.e. a label) for the beginning of the generated code for
26820 a lexical block. */
26821
26822 static void
26823 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26824 unsigned int blocknum)
26825 {
26826 switch_to_section (current_function_section ());
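/* The BLOCK_BEGIN_LABEL / BLOCK_END_LABEL pair for BLOCKNUM delimits the
   code generated for the block, so that its DW_TAG_lexical_block DIE can
   describe the corresponding address range.  */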
26827 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26828 }
26829
26830 /* Output a marker (i.e. a label) for the end of the generated code for a
26831 lexical block. */
26832
26833 static void
26834 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26835 {
26836 switch_to_section (current_function_section ());
26837 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26838 }
26839
26840 /* Returns nonzero if it is appropriate not to emit any debugging
26841 information for BLOCK, because it doesn't contain any instructions.
26842
26843 Don't allow this for blocks with nested functions or local classes
26844 as we would end up with orphans, and in the presence of scheduling
26845 we may end up calling them anyway. */
26846
26847 static bool
26848 dwarf2out_ignore_block (const_tree block)
26849 {
26850 tree decl;
26851 unsigned int i;
26852
26853 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26854 if (TREE_CODE (decl) == FUNCTION_DECL
26855 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26856 return 0;
26857 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26858 {
26859 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26860 if (TREE_CODE (decl) == FUNCTION_DECL
26861 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26862 return 0;
26863 }
26864
26865 return 1;
26866 }
26867
26868 /* Hash table routines for file_hash. */
26869
26870 bool
26871 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26872 {
26873 return filename_cmp (p1->filename, p2) == 0;
26874 }
26875
26876 hashval_t
26877 dwarf_file_hasher::hash (dwarf_file_data *p)
26878 {
26879 return htab_hash_string (p->filename);
26880 }
26881
26882 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26883 dwarf2out.c) and return its "index". The index of each (known) filename is
26884 just a unique number which is associated with only that one filename. We
26885 need such numbers for the sake of generating labels (in the .debug_sfnames
26886 section) and references to those files numbers (in the .debug_srcinfo
26887 and .debug_macinfo sections). If the filename given as an argument is not
26888 found in our current list, add it to the list and assign it the next
26889 available unique index number. */
26890
26891 static struct dwarf_file_data *
26892 lookup_filename (const char *file_name)
26893 {
26894 struct dwarf_file_data * created;
26895
26896 if (!file_name)
26897 return NULL;
26898
26899 dwarf_file_data **slot
26900 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26901 INSERT);
26902 if (*slot)
26903 return *slot;
26904
26905 created = ggc_alloc<dwarf_file_data> ();
26906 created->filename = file_name;
26907 created->emitted_number = 0;
26908 *slot = created;
26909 return created;
26910 }
26911
26912 /* If the assembler will construct the file table, then translate the compiler
26913 internal file table number into the assembler file table number, and emit
26914 a .file directive if we haven't already emitted one yet. The file table
26915 numbers are different because we prune debug info for unused variables and
26916 types, which may include filenames. */
26917
26918 static int
26919 maybe_emit_file (struct dwarf_file_data * fd)
26920 {
26921 if (! fd->emitted_number)
26922 {
26923 if (last_emitted_file)
26924 fd->emitted_number = last_emitted_file->emitted_number + 1;
26925 else
26926 fd->emitted_number = 1;
26927 last_emitted_file = fd;
26928
26929 if (output_asm_line_debug_info ())
26930 {
26931 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26932 output_quoted_string (asm_out_file,
26933 remap_debug_filename (fd->filename));
26934 fputc ('\n', asm_out_file);
26935 }
26936 }
26937
26938 return fd->emitted_number;
26939 }
26940
26941 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26942 That generation should happen after function debug info has been
26943 generated. The value of the attribute is the constant value of ARG. */
26944
26945 static void
26946 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26947 {
26948 die_arg_entry entry;
26949
26950 if (!die || !arg)
26951 return;
26952
26953 gcc_assert (early_dwarf);
26954
26955 if (!tmpl_value_parm_die_table)
26956 vec_alloc (tmpl_value_parm_die_table, 32);
26957
26958 entry.die = die;
26959 entry.arg = arg;
26960 vec_safe_push (tmpl_value_parm_die_table, entry);
26961 }
26962
26963 /* Return TRUE if T is an instance of a generic type, FALSE
26964 otherwise. */
26965
26966 static bool
26967 generic_type_p (tree t)
26968 {
26969 if (t == NULL_TREE || !TYPE_P (t))
26970 return false;
26971 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26972 }
26973
26974 /* Schedule the generation of the generic parameter DIEs for the
26975 instance of generic type T. The proper generation itself is later
26976 done by gen_scheduled_generic_parms_dies. */
26977
26978 static void
26979 schedule_generic_params_dies_gen (tree t)
26980 {
26981 if (!generic_type_p (t))
26982 return;
26983
26984 gcc_assert (early_dwarf);
26985
26986 if (!generic_type_instances)
26987 vec_alloc (generic_type_instances, 256);
26988
26989 vec_safe_push (generic_type_instances, t);
26990 }
26991
26992 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26993 by append_entry_to_tmpl_value_parm_die_table. This function must
26994 be called after function DIEs have been generated. */
26995
26996 static void
26997 gen_remaining_tmpl_value_param_die_attribute (void)
26998 {
26999 if (tmpl_value_parm_die_table)
27000 {
27001 unsigned i, j;
27002 die_arg_entry *e;
27003
27004 /* We do this in two phases: first get the cases we can
27005 handle during early-finish, preserving those we cannot
27006 (containing symbolic constants where we don't yet know
27007 whether we are going to output the referenced symbols).
27008 For those we try again at late-finish. */
27009 j = 0;
27010 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27011 {
27012 if (!e->die->removed
27013 && !tree_add_const_value_attribute (e->die, e->arg))
27014 {
27015 dw_loc_descr_ref loc = NULL;
27016 if (! early_dwarf
27017 && (dwarf_version >= 5 || !dwarf_strict))
27018 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27019 if (loc)
27020 add_AT_loc (e->die, DW_AT_location, loc);
27021 else
27022 (*tmpl_value_parm_die_table)[j++] = *e;
27023 }
27024 }
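/* Entries that could not be handled yet were compacted into the first J
   slots above; keep only those for the late-finish retry.  */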
27025 tmpl_value_parm_die_table->truncate (j);
27026 }
27027 }
27028
27029 /* Generate generic parameters DIEs for instances of generic types
27030 that have been previously scheduled by
27031 schedule_generic_params_dies_gen. This function must be called
27032 after all the types of the CU have been laid out. */
27033
27034 static void
27035 gen_scheduled_generic_parms_dies (void)
27036 {
27037 unsigned i;
27038 tree t;
27039
27040 if (!generic_type_instances)
27041 return;
27042
27043 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27044 if (COMPLETE_TYPE_P (t))
27045 gen_generic_params_dies (t);
27046
27047 generic_type_instances = NULL;
27048 }
27049
27050
27051 /* Replace DW_AT_name for the decl with name. */
27052
27053 static void
27054 dwarf2out_set_name (tree decl, tree name)
27055 {
27056 dw_die_ref die;
27057 dw_attr_node *attr;
27058 const char *dname;
27059
27060 die = TYPE_SYMTAB_DIE (decl);
27061 if (!die)
27062 return;
27063
27064 dname = dwarf2_name (name, 0);
27065 if (!dname)
27066 return;
27067
27068 attr = get_AT (die, DW_AT_name);
27069 if (attr)
27070 {
27071 struct indirect_string_node *node;
27072
27073 node = find_AT_string (dname);
27074 /* replace the string. */
27075 attr->dw_attr_val.v.val_str = node;
27076 }
27077
27078 else
27079 add_name_attribute (die, dname);
27080 }
27081
27082 /* True if before or during processing of the first function being emitted. */
27083 static bool in_first_function_p = true;
27084 /* True if loc_note during dwarf2out_var_location call might still be
27085 before first real instruction at address equal to .Ltext0. */
27086 static bool maybe_at_text_label_p = true;
27087 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27088 static unsigned int first_loclabel_num_not_at_text_label;
27089
27090 /* Look ahead for a real insn, or for a begin stmt marker. */
27091
27092 static rtx_insn *
27093 dwarf2out_next_real_insn (rtx_insn *loc_note)
27094 {
27095 rtx_insn *next_real = NEXT_INSN (loc_note);
27096
27097 while (next_real)
27098 if (INSN_P (next_real))
27099 break;
27100 else
27101 next_real = NEXT_INSN (next_real);
27102
27103 return next_real;
27104 }
27105
27106 /* Called by the final INSN scan whenever we see a var location. We
27107 use it to drop labels in the right places, and throw the location in
27108 our lookup table. */
27109
27110 static void
27111 dwarf2out_var_location (rtx_insn *loc_note)
27112 {
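/* The 2 extra bytes leave room for the "-1" suffix added when the
   post-call label is built from LAST_LABEL below.  */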
27113 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27114 struct var_loc_node *newloc;
27115 rtx_insn *next_real, *next_note;
27116 rtx_insn *call_insn = NULL;
27117 static const char *last_label;
27118 static const char *last_postcall_label;
27119 static bool last_in_cold_section_p;
27120 static rtx_insn *expected_next_loc_note;
27121 tree decl;
27122 bool var_loc_p;
27123 var_loc_view view = 0;
27124
27125 if (!NOTE_P (loc_note))
27126 {
27127 if (CALL_P (loc_note))
27128 {
27129 maybe_reset_location_view (loc_note, cur_line_info_table);
27130 call_site_count++;
27131 if (SIBLING_CALL_P (loc_note))
27132 tail_call_site_count++;
27133 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27134 {
27135 call_insn = loc_note;
27136 loc_note = NULL;
27137 var_loc_p = false;
27138
27139 next_real = dwarf2out_next_real_insn (call_insn);
27140 next_note = NULL;
27141 cached_next_real_insn = NULL;
27142 goto create_label;
27143 }
27144 if (optimize == 0 && !flag_var_tracking)
27145 {
27146 /* When the var-tracking pass is not running, there is no note
27147 for indirect calls whose target is compile-time known. In this
27148 case, process such calls specifically so that we generate call
27149 sites for them anyway. */
27150 rtx x = PATTERN (loc_note);
27151 if (GET_CODE (x) == PARALLEL)
27152 x = XVECEXP (x, 0, 0);
27153 if (GET_CODE (x) == SET)
27154 x = SET_SRC (x);
27155 if (GET_CODE (x) == CALL)
27156 x = XEXP (x, 0);
27157 if (!MEM_P (x)
27158 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27159 || !SYMBOL_REF_DECL (XEXP (x, 0))
27160 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27161 != FUNCTION_DECL))
27162 {
27163 call_insn = loc_note;
27164 loc_note = NULL;
27165 var_loc_p = false;
27166
27167 next_real = dwarf2out_next_real_insn (call_insn);
27168 next_note = NULL;
27169 cached_next_real_insn = NULL;
27170 goto create_label;
27171 }
27172 }
27173 }
27174 else if (!debug_variable_location_views)
27175 gcc_unreachable ();
27176 else
27177 maybe_reset_location_view (loc_note, cur_line_info_table);
27178
27179 return;
27180 }
27181
27182 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27183 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27184 return;
27185
27186 /* Optimize processing a large consecutive sequence of location
27187 notes so we don't spend too much time in next_real_insn. If the
27188 next insn is another location note, remember the next_real_insn
27189 calculation for next time. */
27190 next_real = cached_next_real_insn;
27191 if (next_real)
27192 {
27193 if (expected_next_loc_note != loc_note)
27194 next_real = NULL;
27195 }
27196
27197 next_note = NEXT_INSN (loc_note);
27198 if (! next_note
27199 || next_note->deleted ()
27200 || ! NOTE_P (next_note)
27201 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27202 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27203 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27204 next_note = NULL;
27205
27206 if (! next_real)
27207 next_real = dwarf2out_next_real_insn (loc_note);
27208
27209 if (next_note)
27210 {
27211 expected_next_loc_note = next_note;
27212 cached_next_real_insn = next_real;
27213 }
27214 else
27215 cached_next_real_insn = NULL;
27216
27217 /* If there are no instructions which would be affected by this note,
27218 don't do anything. */
27219 if (var_loc_p
27220 && next_real == NULL_RTX
27221 && !NOTE_DURING_CALL_P (loc_note))
27222 return;
27223
27224 create_label:
27225
27226 if (next_real == NULL_RTX)
27227 next_real = get_last_insn ();
27228
27229 /* If there were any real insns between the note we processed last time
27230 and this note (or if it is the first note), clear
27231 last_{,postcall_}label so that they are not reused this time. */
27232 if (last_var_location_insn == NULL_RTX
27233 || last_var_location_insn != next_real
27234 || last_in_cold_section_p != in_cold_section_p)
27235 {
27236 last_label = NULL;
27237 last_postcall_label = NULL;
27238 }
27239
27240 if (var_loc_p)
27241 {
27242 const char *label
27243 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27244 view = cur_line_info_table->view;
27245 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27246 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27247 if (newloc == NULL)
27248 return;
27249 }
27250 else
27251 {
27252 decl = NULL_TREE;
27253 newloc = NULL;
27254 }
27255
27256 /* If there were no real insns between the note we processed last time
27257 and this note, use the label we emitted last time. Otherwise
27258 create a new label and emit it. */
27259 if (last_label == NULL)
27260 {
27261 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27262 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27263 loclabel_num++;
27264 last_label = ggc_strdup (loclabel);
27265 /* See if loclabel might be equal to .Ltext0. If yes,
27266 bump first_loclabel_num_not_at_text_label. */
27267 if (!have_multiple_function_sections
27268 && in_first_function_p
27269 && maybe_at_text_label_p)
27270 {
27271 static rtx_insn *last_start;
27272 rtx_insn *insn;
27273 for (insn = loc_note; insn; insn = previous_insn (insn))
27274 if (insn == last_start)
27275 break;
27276 else if (!NONDEBUG_INSN_P (insn))
27277 continue;
27278 else
27279 {
27280 rtx body = PATTERN (insn);
27281 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27282 continue;
27283 /* Inline asm could occupy zero bytes. */
27284 else if (GET_CODE (body) == ASM_INPUT
27285 || asm_noperands (body) >= 0)
27286 continue;
27287 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27288 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27289 continue;
27290 #endif
27291 else
27292 {
27293 /* Assume insn has non-zero length. */
27294 maybe_at_text_label_p = false;
27295 break;
27296 }
27297 }
27298 if (maybe_at_text_label_p)
27299 {
27300 last_start = loc_note;
27301 first_loclabel_num_not_at_text_label = loclabel_num;
27302 }
27303 }
27304 }
27305
27306 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27307 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27308
27309 if (!var_loc_p)
27310 {
27311 struct call_arg_loc_node *ca_loc
27312 = ggc_cleared_alloc<call_arg_loc_node> ();
27313 rtx_insn *prev = call_insn;
27314
27315 ca_loc->call_arg_loc_note
27316 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27317 ca_loc->next = NULL;
27318 ca_loc->label = last_label;
27319 gcc_assert (prev
27320 && (CALL_P (prev)
27321 || (NONJUMP_INSN_P (prev)
27322 && GET_CODE (PATTERN (prev)) == SEQUENCE
27323 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27324 if (!CALL_P (prev))
27325 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27326 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27327
27328 /* Look for a SYMBOL_REF in the "prev" instruction. */
27329 rtx x = get_call_rtx_from (PATTERN (prev));
27330 if (x)
27331 {
27332 /* Try to get the call symbol, if any. */
27333 if (MEM_P (XEXP (x, 0)))
27334 x = XEXP (x, 0);
27335 /* First, look for a memory access to a symbol_ref. */
27336 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27337 && SYMBOL_REF_DECL (XEXP (x, 0))
27338 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27339 ca_loc->symbol_ref = XEXP (x, 0);
27340 /* Otherwise, look at a compile-time known user-level function
27341 declaration. */
27342 else if (MEM_P (x)
27343 && MEM_EXPR (x)
27344 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27345 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27346 }
27347
27348 ca_loc->block = insn_scope (prev);
27349 if (call_arg_locations)
27350 call_arg_loc_last->next = ca_loc;
27351 else
27352 call_arg_locations = ca_loc;
27353 call_arg_loc_last = ca_loc;
27354 }
27355 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27356 {
27357 newloc->label = last_label;
27358 newloc->view = view;
27359 }
27360 else
27361 {
27362 if (!last_postcall_label)
27363 {
27364 sprintf (loclabel, "%s-1", last_label);
27365 last_postcall_label = ggc_strdup (loclabel);
27366 }
27367 newloc->label = last_postcall_label;
27368 /* ??? This view is at last_label, not last_label-1, but we
27369 could only assume view at last_label-1 is zero if we could
27370 assume calls always have length greater than one. This is
27371 probably true in general, though there might be a rare
27372 exception to this rule, e.g. if a call insn is optimized out
27373 by target magic. Then, even the -1 in the label will be
27374 wrong, which might invalidate the range. Anyway, using view,
27375 though technically possibly incorrect, will work as far as
27376 ranges go: since L-1 is in the middle of the call insn,
27377 (L-1).0 and (L-1).V shouldn't make any difference, and having
27378 the loclist entry refer to the .loc entry might be useful, so
27379 leave it like this. */
27380 newloc->view = view;
27381 }
27382
27383 if (var_loc_p && flag_debug_asm)
27384 {
27385 const char *name, *sep, *patstr;
27386 if (decl && DECL_NAME (decl))
27387 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27388 else
27389 name = "";
27390 if (NOTE_VAR_LOCATION_LOC (loc_note))
27391 {
27392 sep = " => ";
27393 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27394 }
27395 else
27396 {
27397 sep = " ";
27398 patstr = "RESET";
27399 }
27400 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27401 name, sep, patstr);
27402 }
27403
27404 last_var_location_insn = next_real;
27405 last_in_cold_section_p = in_cold_section_p;
27406 }
27407
27408 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27409 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27410 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27411 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27412 BLOCK_FRAGMENT_ORIGIN links. */
27413 static bool
27414 block_within_block_p (tree block, tree outer, bool bothways)
27415 {
27416 if (block == outer)
27417 return true;
27418
27419 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27420 for (tree context = BLOCK_SUPERCONTEXT (block);
27421 context != outer;
27422 context = BLOCK_SUPERCONTEXT (context))
27423 if (!context || TREE_CODE (context) != BLOCK)
27424 return false;
27425
27426 if (!bothways)
27427 return true;
27428
27429 /* Now check that each block is actually referenced by its
27430 parent. */
27431 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27432 context = BLOCK_SUPERCONTEXT (context))
27433 {
27434 if (BLOCK_FRAGMENT_ORIGIN (context))
27435 {
27436 gcc_assert (!BLOCK_SUBBLOCKS (context));
27437 context = BLOCK_FRAGMENT_ORIGIN (context);
27438 }
27439 for (tree sub = BLOCK_SUBBLOCKS (context);
27440 sub != block;
27441 sub = BLOCK_CHAIN (sub))
27442 if (!sub)
27443 return false;
27444 if (context == outer)
27445 return true;
27446 else
27447 block = context;
27448 }
27449 }
27450
27451 /* Called during final while assembling the marker of the entry point
27452 for an inlined function. */
27453
27454 static void
27455 dwarf2out_inline_entry (tree block)
27456 {
27457 gcc_assert (debug_inline_points);
27458
27459 /* If we can't represent it, don't bother. */
27460 if (!(dwarf_version >= 3 || !dwarf_strict))
27461 return;
27462
27463 gcc_assert (DECL_P (block_ultimate_origin (block)));
27464
27465 /* Sanity check the block tree. This would catch a case in which
27466 BLOCK got removed from the tree reachable from the outermost
27467 lexical block, but got retained in markers. It would still link
27468 back to its parents, but some ancestor would be missing a link
27469 down the path to the sub BLOCK. If the block got removed, its
27470 BLOCK_NUMBER will not be a usable value. */
27471 if (flag_checking)
27472 gcc_assert (block_within_block_p (block,
27473 DECL_INITIAL (current_function_decl),
27474 true));
27475
27476 gcc_assert (inlined_function_outer_scope_p (block));
27477 gcc_assert (!BLOCK_DIE (block));
27478
27479 if (BLOCK_FRAGMENT_ORIGIN (block))
27480 block = BLOCK_FRAGMENT_ORIGIN (block);
27481 /* Can the entry point ever not be at the beginning of an
27482 unfragmented lexical block? */
27483 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27484 || (cur_line_info_table
27485 && !ZERO_VIEW_P (cur_line_info_table->view))))
27486 return;
27487
27488 if (!inline_entry_data_table)
27489 inline_entry_data_table
27490 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27491
27492
27493 inline_entry_data **iedp
27494 = inline_entry_data_table->find_slot_with_hash (block,
27495 htab_hash_pointer (block),
27496 INSERT);
27497 if (*iedp)
27498 /* ??? Ideally, we'd record all entry points for the same inlined
27499 function (some may have been duplicated by e.g. unrolling), but
27500 we have no way to represent that ATM. */
27501 return;
27502
27503 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27504 ied->block = block;
27505 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27506 ied->label_num = BLOCK_NUMBER (block);
27507 if (cur_line_info_table)
27508 ied->view = cur_line_info_table->view;
27509
27510 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27511
27512 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27513 BLOCK_NUMBER (block));
27514 ASM_OUTPUT_LABEL (asm_out_file, label);
27515 }
27516
27517 /* Called from finalize_size_functions for size functions so that their body
27518 can be encoded in the debug info to describe the layout of variable-length
27519 structures. */
27520
27521 static void
27522 dwarf2out_size_function (tree decl)
27523 {
27524 function_to_dwarf_procedure (decl);
27525 }
27526
27527 /* Note in one location list that the text section has changed.  */
27528
27529 int
27530 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27531 {
27532 var_loc_list *list = *slot;
27533 if (list->first)
27534 list->last_before_switch
27535 = list->last->next ? list->last->next : list->last;
27536 return 1;
27537 }
27538
27539 /* Note in all location lists that the text section has changed.  */
27540
27541 static void
27542 var_location_switch_text_section (void)
27543 {
27544 if (decl_loc_table == NULL)
27545 return;
27546
27547 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27548 }
27549
27550 /* Create a new line number table. */
27551
27552 static dw_line_info_table *
27553 new_line_info_table (void)
27554 {
27555 dw_line_info_table *table;
27556
27557 table = ggc_cleared_alloc<dw_line_info_table> ();
27558 table->file_num = 1;
27559 table->line_num = 1;
27560 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27561 FORCE_RESET_NEXT_VIEW (table->view);
27562 table->symviews_since_reset = 0;
27563
27564 return table;
27565 }
27566
27567 /* Look up the line info table for section SEC and make it the current
27568    one, so that we don't have to repeat the lookup for every source line.  */
27569
27570 static void
27571 set_cur_line_info_table (section *sec)
27572 {
27573 dw_line_info_table *table;
27574
27575 if (sec == text_section)
27576 table = text_section_line_info;
27577 else if (sec == cold_text_section)
27578 {
27579 table = cold_text_section_line_info;
27580 if (!table)
27581 {
27582 cold_text_section_line_info = table = new_line_info_table ();
27583 table->end_label = cold_end_label;
27584 }
27585 }
27586 else
27587 {
27588 const char *end_label;
27589
27590 if (crtl->has_bb_partition)
27591 {
27592 if (in_cold_section_p)
27593 end_label = crtl->subsections.cold_section_end_label;
27594 else
27595 end_label = crtl->subsections.hot_section_end_label;
27596 }
27597 else
27598 {
27599 char label[MAX_ARTIFICIAL_LABEL_BYTES];
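	  /* With the default FUNC_END_LABEL ("LFE") this yields names like
	     ".LFE42" (hypothetical number); the exact spelling is target
	     dependent.  */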
27600 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27601 current_function_funcdef_no);
27602 end_label = ggc_strdup (label);
27603 }
27604
27605 table = new_line_info_table ();
27606 table->end_label = end_label;
27607
27608 vec_safe_push (separate_line_info, table);
27609 }
27610
27611 if (output_asm_line_debug_info ())
27612 table->is_stmt = (cur_line_info_table
27613 ? cur_line_info_table->is_stmt
27614 : DWARF_LINE_DEFAULT_IS_STMT_START);
27615 cur_line_info_table = table;
27616 }
27617
27618
27619 /* We need to reset the locations at the beginning of each
27620 function. We can't do this in the end_function hook, because the
27621 declarations that use the locations won't have been output when
27622 that hook is called. Also compute have_multiple_function_sections here. */
27623
27624 static void
27625 dwarf2out_begin_function (tree fun)
27626 {
27627 section *sec = function_section (fun);
27628
27629 if (sec != text_section)
27630 have_multiple_function_sections = true;
27631
27632 if (crtl->has_bb_partition && !cold_text_section)
27633 {
27634 gcc_assert (current_function_decl == fun);
27635 cold_text_section = unlikely_text_section ();
27636 switch_to_section (cold_text_section);
27637 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27638 switch_to_section (sec);
27639 }
27640
27641 dwarf2out_note_section_used ();
27642 call_site_count = 0;
27643 tail_call_site_count = 0;
27644
27645 set_cur_line_info_table (sec);
27646 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27647 }
27648
27649 /* Helper function of dwarf2out_end_function, called only after emitting
27650 the very first function into assembly. Check if some .debug_loc range
27651 might end with a .LVL* label that could be equal to .Ltext0.
27652 In that case we must force using absolute addresses in .debug_loc ranges,
27653 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27654 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27655 list terminator.
27656 Set have_multiple_function_sections to true in that case and
27657 terminate htab traversal. */
27658
27659 int
27660 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27661 {
27662 var_loc_list *entry = *slot;
27663 struct var_loc_node *node;
27664
27665 node = entry->first;
27666 if (node && node->next && node->next->label)
27667 {
27668 unsigned int i;
27669 const char *label = node->next->label;
27670 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27671
27672 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27673 {
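	  /* Regenerate the location label names (typically ".LVL0", ".LVL1",
	     ...; the exact spelling is target dependent) and compare each
	     against this range's starting label.  */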
27674 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27675 if (strcmp (label, loclabel) == 0)
27676 {
27677 have_multiple_function_sections = true;
27678 return 0;
27679 }
27680 }
27681 }
27682 return 1;
27683 }
27684
27685 /* Hook called after emitting a function into assembly.
27686 This does something only for the very first function emitted. */
27687
27688 static void
27689 dwarf2out_end_function (unsigned int)
27690 {
27691 if (in_first_function_p
27692 && !have_multiple_function_sections
27693 && first_loclabel_num_not_at_text_label
27694 && decl_loc_table)
27695 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27696 in_first_function_p = false;
27697 maybe_at_text_label_p = false;
27698 }
27699
27700 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27701 front-ends register a translation unit even before dwarf2out_init is
27702 called. */
27703 static tree main_translation_unit = NULL_TREE;
27704
27705 /* Hook called by front-ends after they have built their main translation
27706    unit.  Associate comp_unit_die with UNIT.  */
27707
27708 static void
27709 dwarf2out_register_main_translation_unit (tree unit)
27710 {
27711 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27712 && main_translation_unit == NULL_TREE);
27713 main_translation_unit = unit;
27714 /* If dwarf2out_init has not been called yet, it will perform the association
27715 itself looking at main_translation_unit. */
27716 if (decl_die_table != NULL)
27717 equate_decl_number_to_die (unit, comp_unit_die ());
27718 }
27719
27720 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27721
27722 static void
27723 push_dw_line_info_entry (dw_line_info_table *table,
27724 enum dw_line_info_opcode opcode, unsigned int val)
27725 {
27726 dw_line_info_entry e;
27727 e.opcode = opcode;
27728 e.val = val;
27729 vec_safe_push (table->entries, e);
27730 }
27731
27732 /* Output a label to mark the beginning of a source code line entry
27733 and record information relating to this source line, in
27734 'line_info_table' for later output of the .debug_line section. */
27735 /* ??? The discriminator parameter ought to be unsigned. */
27736
27737 static void
27738 dwarf2out_source_line (unsigned int line, unsigned int column,
27739 const char *filename,
27740 int discriminator, bool is_stmt)
27741 {
27742 unsigned int file_num;
27743 dw_line_info_table *table;
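  /* Counter used to hand out a unique symbolic view id whenever the
     assembler is left to compute the actual view numbers.  */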
27744 static var_loc_view lvugid;
27745
27746 if (debug_info_level < DINFO_LEVEL_TERSE)
27747 return;
27748
27749 table = cur_line_info_table;
27750
27751 if (line == 0)
27752 {
27753 if (debug_variable_location_views
27754 && output_asm_line_debug_info ()
27755 && table && !RESETTING_VIEW_P (table->view))
27756 {
27757 /* If we're using the assembler to compute view numbers, we
27758 can't issue a .loc directive for line zero, so we can't
27759 get a view number at this point. We might attempt to
27760 compute it from the previous view, or equate it to a
27761 subsequent view (though it might not be there!), but
27762 since we're omitting the line number entry, we might as
27763 well omit the view number as well. That means pretending
27764 it's a view number zero, which might very well turn out
27765 to be correct. ??? Extend the assembler so that the
27766 compiler could emit e.g. ".locview .LVU#", to output a
27767 view without changing line number information. We'd then
27768 have to count it in symviews_since_reset; when it's omitted,
27769 it doesn't count. */
27770 if (!zero_view_p)
27771 zero_view_p = BITMAP_GGC_ALLOC ();
27772 bitmap_set_bit (zero_view_p, table->view);
27773 if (flag_debug_asm)
27774 {
27775 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27776 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27777 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27778 ASM_COMMENT_START);
27779 assemble_name (asm_out_file, label);
27780 putc ('\n', asm_out_file);
27781 }
27782 table->view = ++lvugid;
27783 }
27784 return;
27785 }
27786
27787 /* The discriminator column was added in dwarf4. Simplify the below
27788 by simply removing it if we're not supposed to output it. */
27789 if (dwarf_version < 4 && dwarf_strict)
27790 discriminator = 0;
27791
27792 if (!debug_column_info)
27793 column = 0;
27794
27795 file_num = maybe_emit_file (lookup_filename (filename));
27796
27797 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27798 the debugger has used the second (possibly duplicate) line number
27799 at the beginning of the function to mark the end of the prologue.
27800 We could eliminate any other duplicates within the function. For
27801 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27802 that second line number entry. */
27803 /* Recall that this end-of-prologue indication is *not* the same thing
27804 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27805 to which the hook corresponds, follows the last insn that was
27806 emitted by gen_prologue. What we need is to precede the first insn
27807 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27808 insn that corresponds to something the user wrote. These may be
27809 very different locations once scheduling is enabled. */
27810
27811 if (0 && file_num == table->file_num
27812 && line == table->line_num
27813 && column == table->column_num
27814 && discriminator == table->discrim_num
27815 && is_stmt == table->is_stmt)
27816 return;
27817
27818 switch_to_section (current_function_section ());
27819
27820 /* If requested, emit something human-readable. */
27821 if (flag_debug_asm)
27822 {
27823 if (debug_column_info)
27824 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27825 filename, line, column);
27826 else
27827 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27828 filename, line);
27829 }
27830
27831 if (output_asm_line_debug_info ())
27832 {
27833 /* Emit the .loc directive understood by GNU as. */
27834 /* "\t.loc %u %u %u" with file_num, line and column, optionally
27835    followed by " is_stmt %u", " discriminator %u" and " view ...".  */
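	  /* A full directive might look like (hypothetical numbers):
	     ".loc 1 42 7 is_stmt 0 discriminator 2 view .LVU5".  */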
27836 fputs ("\t.loc ", asm_out_file);
27837 fprint_ul (asm_out_file, file_num);
27838 putc (' ', asm_out_file);
27839 fprint_ul (asm_out_file, line);
27840 putc (' ', asm_out_file);
27841 fprint_ul (asm_out_file, column);
27842
27843 if (is_stmt != table->is_stmt)
27844 {
27845 fputs (" is_stmt ", asm_out_file);
27846 putc (is_stmt ? '1' : '0', asm_out_file);
27847 }
27848 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27849 {
27850 gcc_assert (discriminator > 0);
27851 fputs (" discriminator ", asm_out_file);
27852 fprint_ul (asm_out_file, (unsigned long) discriminator);
27853 }
27854 if (debug_variable_location_views)
27855 {
27856 if (!RESETTING_VIEW_P (table->view))
27857 {
27858 table->symviews_since_reset++;
27859 if (table->symviews_since_reset > symview_upper_bound)
27860 symview_upper_bound = table->symviews_since_reset;
27861 /* When we're using the assembler to compute view
27862 numbers, we output symbolic labels after "view" in
27863 .loc directives, and the assembler will set them for
27864 us, so that we can refer to the view numbers in
27865 location lists. The only exceptions are when we know
27866 a view will be zero: "-0" is a forced reset, used
27867 e.g. in the beginning of functions, whereas "0" tells
27868 the assembler to check that there was a PC change
27869 since the previous view, in a way that implicitly
27870 resets the next view. */
27871 fputs (" view ", asm_out_file);
27872 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27873 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27874 assemble_name (asm_out_file, label);
27875 table->view = ++lvugid;
27876 }
27877 else
27878 {
27879 table->symviews_since_reset = 0;
27880 if (FORCE_RESETTING_VIEW_P (table->view))
27881 fputs (" view -0", asm_out_file);
27882 else
27883 fputs (" view 0", asm_out_file);
27884 /* Mark the present view as a zero view. Earlier debug
27885 binds may have already added its id to loclists to be
27886 emitted later, so we can't reuse the id for something
27887 else. However, it's good to know whether a view is
27888 known to be zero, because then we may be able to
27889 optimize out locviews that are all zeros, so take
27890 note of it in zero_view_p. */
27891 if (!zero_view_p)
27892 zero_view_p = BITMAP_GGC_ALLOC ();
27893 bitmap_set_bit (zero_view_p, lvugid);
27894 table->view = ++lvugid;
27895 }
27896 }
27897 putc ('\n', asm_out_file);
27898 }
27899 else
27900 {
27901 unsigned int label_num = ++line_info_label_num;
27902
27903 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27904
27905 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27906 push_dw_line_info_entry (table, LI_adv_address, label_num);
27907 else
27908 push_dw_line_info_entry (table, LI_set_address, label_num);
27909 if (debug_variable_location_views)
27910 {
27911 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27912 if (resetting)
27913 table->view = 0;
27914
27915 if (flag_debug_asm)
27916 fprintf (asm_out_file, "\t%s view %s%d\n",
27917 ASM_COMMENT_START,
27918 resetting ? "-" : "",
27919 table->view);
27920
27921 table->view++;
27922 }
27923 if (file_num != table->file_num)
27924 push_dw_line_info_entry (table, LI_set_file, file_num);
27925 if (discriminator != table->discrim_num)
27926 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27927 if (is_stmt != table->is_stmt)
27928 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27929 push_dw_line_info_entry (table, LI_set_line, line);
27930 if (debug_column_info)
27931 push_dw_line_info_entry (table, LI_set_column, column);
27932 }
27933
27934 table->file_num = file_num;
27935 table->line_num = line;
27936 table->column_num = column;
27937 table->discrim_num = discriminator;
27938 table->is_stmt = is_stmt;
27939 table->in_use = true;
27940 }
27941
27942 /* Record the beginning of a new source file. */
27943
27944 static void
27945 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27946 {
27947 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27948 {
27949 macinfo_entry e;
27950 e.code = DW_MACINFO_start_file;
27951 e.lineno = lineno;
27952 e.info = ggc_strdup (filename);
27953 vec_safe_push (macinfo_table, e);
27954 }
27955 }
27956
27957 /* Record the end of a source file. */
27958
27959 static void
27960 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27961 {
27962 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27963 {
27964 macinfo_entry e;
27965 e.code = DW_MACINFO_end_file;
27966 e.lineno = lineno;
27967 e.info = NULL;
27968 vec_safe_push (macinfo_table, e);
27969 }
27970 }
27971
27972 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27973 the tail part of the directive line, i.e. the part which is past the
27974 initial whitespace, #, whitespace, directive-name, whitespace part. */
27975
27976 static void
27977 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27978 const char *buffer ATTRIBUTE_UNUSED)
27979 {
27980 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27981 {
27982 macinfo_entry e;
27983 /* Insert a dummy first entry to be able to optimize the whole
27984 predefined macro block using DW_MACRO_import. */
27985 if (macinfo_table->is_empty () && lineno <= 1)
27986 {
27987 e.code = 0;
27988 e.lineno = 0;
27989 e.info = NULL;
27990 vec_safe_push (macinfo_table, e);
27991 }
27992 e.code = DW_MACINFO_define;
27993 e.lineno = lineno;
27994 e.info = ggc_strdup (buffer);
27995 vec_safe_push (macinfo_table, e);
27996 }
27997 }
27998
27999 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28000 the tail part of the directive line, i.e. the part which is past the
28001 initial whitespace, #, whitespace, directive-name, whitespace part. */
28002
28003 static void
28004 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28005 const char *buffer ATTRIBUTE_UNUSED)
28006 {
28007 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28008 {
28009 macinfo_entry e;
28010 /* Insert a dummy first entry to be able to optimize the whole
28011 predefined macro block using DW_MACRO_import. */
28012 if (macinfo_table->is_empty () && lineno <= 1)
28013 {
28014 e.code = 0;
28015 e.lineno = 0;
28016 e.info = NULL;
28017 vec_safe_push (macinfo_table, e);
28018 }
28019 e.code = DW_MACINFO_undef;
28020 e.lineno = lineno;
28021 e.info = ggc_strdup (buffer);
28022 vec_safe_push (macinfo_table, e);
28023 }
28024 }
28025
28026 /* Helpers to manipulate the hash table of macinfo entries.  */
28027
28028 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28029 {
28030 static inline hashval_t hash (const macinfo_entry *);
28031 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28032 };
28033
28034 inline hashval_t
28035 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28036 {
28037 return htab_hash_string (entry->info);
28038 }
28039
28040 inline bool
28041 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28042 const macinfo_entry *entry2)
28043 {
28044 return !strcmp (entry1->info, entry2->info);
28045 }
28046
28047 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28048
28049 /* Output a single .debug_macinfo entry. */
28050
28051 static void
28052 output_macinfo_op (macinfo_entry *ref)
28053 {
28054 int file_num;
28055 size_t len;
28056 struct indirect_string_node *node;
28057 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28058 struct dwarf_file_data *fd;
28059
28060 switch (ref->code)
28061 {
28062 case DW_MACINFO_start_file:
28063 fd = lookup_filename (ref->info);
28064 file_num = maybe_emit_file (fd);
28065 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28066 dw2_asm_output_data_uleb128 (ref->lineno,
28067 "Included from line number %lu",
28068 (unsigned long) ref->lineno);
28069 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28070 break;
28071 case DW_MACINFO_end_file:
28072 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28073 break;
28074 case DW_MACINFO_define:
28075 case DW_MACINFO_undef:
28076 len = strlen (ref->info) + 1;
28077 if (!dwarf_strict
28078 && len > DWARF_OFFSET_SIZE
28079 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28080 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28081 {
28082 ref->code = ref->code == DW_MACINFO_define
28083 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28084 output_macinfo_op (ref);
28085 return;
28086 }
28087 dw2_asm_output_data (1, ref->code,
28088 ref->code == DW_MACINFO_define
28089 ? "Define macro" : "Undefine macro");
28090 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28091 (unsigned long) ref->lineno);
28092 dw2_asm_output_nstring (ref->info, -1, "The macro");
28093 break;
28094 case DW_MACRO_define_strp:
28095 case DW_MACRO_undef_strp:
28096 node = find_AT_string (ref->info);
28097 gcc_assert (node
28098 && (node->form == DW_FORM_strp
28099 || node->form == dwarf_form (DW_FORM_strx)));
28100 dw2_asm_output_data (1, ref->code,
28101 ref->code == DW_MACRO_define_strp
28102 ? "Define macro strp"
28103 : "Undefine macro strp");
28104 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28105 (unsigned long) ref->lineno);
28106 if (node->form == DW_FORM_strp)
28107 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28108 debug_str_section, "The macro: \"%s\"",
28109 ref->info);
28110 else
28111 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28112 ref->info);
28113 break;
28114 case DW_MACRO_import:
28115 dw2_asm_output_data (1, ref->code, "Import");
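      /* For import entries created by optimize_macinfo_range, the lineno
	 field is reused to hold the index of the transparent include,
	 which selects the comdat section label referenced here.  */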
28116 ASM_GENERATE_INTERNAL_LABEL (label,
28117 DEBUG_MACRO_SECTION_LABEL,
28118 ref->lineno + macinfo_label_base);
28119 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28120 break;
28121 default:
28122 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28123 ASM_COMMENT_START, (unsigned long) ref->code);
28124 break;
28125 }
28126 }
28127
28128 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28129    other compilation units' .debug_macinfo sections.  IDX is the index of
28130    the first define/undef op; emit a DW_MACRO_import entry referencing the
28131    group and return the number of ops that should be emitted in a comdat
28132    .debug_macinfo section.  If the define/undef entry should be emitted
28133    normally instead, return 0.  */
28134
28135 static unsigned
28136 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28137 macinfo_hash_type **macinfo_htab)
28138 {
28139 macinfo_entry *first, *second, *cur, *inc;
28140 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28141 unsigned char checksum[16];
28142 struct md5_ctx ctx;
28143 char *grp_name, *tail;
28144 const char *base;
28145 unsigned int i, count, encoded_filename_len, linebuf_len;
28146 macinfo_entry **slot;
28147
28148 first = &(*macinfo_table)[idx];
28149 second = &(*macinfo_table)[idx + 1];
28150
28151 /* Optimize only if there are at least two consecutive define/undef ops,
28152 and either all of them are before the first DW_MACINFO_start_file
28153 with lineno {0,1} (i.e. predefined macro block), or all of them are
28154 in some included header file. */
28155 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28156 return 0;
28157 if (vec_safe_is_empty (files))
28158 {
28159 if (first->lineno > 1 || second->lineno > 1)
28160 return 0;
28161 }
28162 else if (first->lineno == 0)
28163 return 0;
28164
28165 /* Find the last define/undef entry that can be grouped together
28166 with first and at the same time compute md5 checksum of their
28167 codes, linenumbers and strings. */
28168 md5_init_ctx (&ctx);
28169 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28170 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28171 break;
28172 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28173 break;
28174 else
28175 {
28176 unsigned char code = cur->code;
28177 md5_process_bytes (&code, 1, &ctx);
28178 checksum_uleb128 (cur->lineno, &ctx);
28179 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28180 }
28181 md5_finish_ctx (&ctx, checksum);
28182 count = i - idx;
28183
28184 /* From the containing include filename (if any) pick up just
28185 usable characters from its basename. */
28186 if (vec_safe_is_empty (files))
28187 base = "";
28188 else
28189 base = lbasename (files->last ().info);
28190 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28191 if (ISIDNUM (base[i]) || base[i] == '.')
28192 encoded_filename_len++;
28193 /* Count the '.' separator appended after the filename.  */
28194 if (encoded_filename_len)
28195 encoded_filename_len++;
28196
28197 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28198 linebuf_len = strlen (linebuf);
28199
28200 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
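  /* For example (hypothetical): "wm4.stdio.h.1." followed by the 32 hex
     digits of the md5 checksum computed above.  */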
28201 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28202 + 16 * 2 + 1);
28203 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28204 tail = grp_name + 4;
28205 if (encoded_filename_len)
28206 {
28207 for (i = 0; base[i]; i++)
28208 if (ISIDNUM (base[i]) || base[i] == '.')
28209 *tail++ = base[i];
28210 *tail++ = '.';
28211 }
28212 memcpy (tail, linebuf, linebuf_len);
28213 tail += linebuf_len;
28214 *tail++ = '.';
28215 for (i = 0; i < 16; i++)
28216 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28217
28218 /* Construct a macinfo_entry for DW_MACRO_import
28219 in the empty vector entry before the first define/undef. */
28220 inc = &(*macinfo_table)[idx - 1];
28221 inc->code = DW_MACRO_import;
28222 inc->lineno = 0;
28223 inc->info = ggc_strdup (grp_name);
28224 if (!*macinfo_htab)
28225 *macinfo_htab = new macinfo_hash_type (10);
28226 /* Avoid emitting duplicates. */
28227 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28228 if (*slot != NULL)
28229 {
28230 inc->code = 0;
28231 inc->info = NULL;
28232 /* If such an entry has been used before, just emit
28233 a DW_MACRO_import op. */
28234 inc = *slot;
28235 output_macinfo_op (inc);
28236 /* And clear all macinfo_entry in the range to avoid emitting them
28237 in the second pass. */
28238 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28239 {
28240 cur->code = 0;
28241 cur->info = NULL;
28242 }
28243 }
28244 else
28245 {
28246 *slot = inc;
28247 inc->lineno = (*macinfo_htab)->elements ();
28248 output_macinfo_op (inc);
28249 }
28250 return count;
28251 }
28252
28253 /* Save any strings needed by the macinfo table in the debug str
28254 table. All strings must be collected into the table by the time
28255 index_string is called. */
28256
28257 static void
28258 save_macinfo_strings (void)
28259 {
28260 unsigned len;
28261 unsigned i;
28262 macinfo_entry *ref;
28263
28264 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28265 {
28266 switch (ref->code)
28267 {
28268 /* Match the logic in output_macinfo_op to decide on
28269 indirect strings. */
28270 case DW_MACINFO_define:
28271 case DW_MACINFO_undef:
28272 len = strlen (ref->info) + 1;
28273 if (!dwarf_strict
28274 && len > DWARF_OFFSET_SIZE
28275 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28276 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28277 set_indirect_string (find_AT_string (ref->info));
28278 break;
28279 case DW_MACRO_define_strp:
28280 case DW_MACRO_undef_strp:
28281 set_indirect_string (find_AT_string (ref->info));
28282 break;
28283 default:
28284 break;
28285 }
28286 }
28287 }
28288
28289 /* Output macinfo section(s). */
28290
28291 static void
28292 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28293 {
28294 unsigned i;
28295 unsigned long length = vec_safe_length (macinfo_table);
28296 macinfo_entry *ref;
28297 vec<macinfo_entry, va_gc> *files = NULL;
28298 macinfo_hash_type *macinfo_htab = NULL;
28299 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28300
28301 if (! length)
28302 return;
28303
28304 /* output_macinfo* uses these interchangeably. */
28305 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28306 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28307 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28308 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28309
28310 /* AIX Assembler inserts the length, so adjust the reference to match the
28311 offset expected by debuggers. */
28312 strcpy (dl_section_ref, debug_line_label);
28313 if (XCOFF_DEBUGGING_INFO)
28314 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28315
28316 /* For .debug_macro emit the section header. */
28317 if (!dwarf_strict || dwarf_version >= 5)
28318 {
28319 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28320 "DWARF macro version number");
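      /* Flags byte: bit 0 set means 64-bit offsets, bit 1 set means a
	 .debug_line offset follows; hence the values 3 and 2 below.  */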
28321 if (DWARF_OFFSET_SIZE == 8)
28322 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28323 else
28324 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28325 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28326 debug_line_section, NULL);
28327 }
28328
28329 /* In the first loop, emit the primary .debug_macinfo section and clear
28330    each macinfo_entry after it has been emitted.  If a longer range of
28331    define/undef ops can be optimized using DW_MACRO_import, the
28332    DW_MACRO_import op is emitted and kept in the vector entry before the
28333    first define/undef in the range, and the whole range of define/undef
28334    ops is kept (not emitted here) for the second loop below.  */
28335 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28336 {
28337 switch (ref->code)
28338 {
28339 case DW_MACINFO_start_file:
28340 vec_safe_push (files, *ref);
28341 break;
28342 case DW_MACINFO_end_file:
28343 if (!vec_safe_is_empty (files))
28344 files->pop ();
28345 break;
28346 case DW_MACINFO_define:
28347 case DW_MACINFO_undef:
28348 if ((!dwarf_strict || dwarf_version >= 5)
28349 && HAVE_COMDAT_GROUP
28350 && vec_safe_length (files) != 1
28351 && i > 0
28352 && i + 1 < length
28353 && (*macinfo_table)[i - 1].code == 0)
28354 {
28355 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28356 if (count)
28357 {
28358 i += count - 1;
28359 continue;
28360 }
28361 }
28362 break;
28363 case 0:
28364 /* A dummy entry may be inserted at the beginning to be able
28365 to optimize the whole block of predefined macros. */
28366 if (i == 0)
28367 continue;
28368 default:
28369 break;
28370 }
28371 output_macinfo_op (ref);
28372 ref->info = NULL;
28373 ref->code = 0;
28374 }
28375
28376 if (!macinfo_htab)
28377 return;
28378
28379 /* Save the number of transparent includes so we can adjust the
28380 label number for the fat LTO object DWARF. */
28381 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28382
28383 delete macinfo_htab;
28384 macinfo_htab = NULL;
28385
28386 /* If any DW_MACRO_import entries were used, then at each such entry
28387    terminate the current chain, switch to a new comdat .debug_macinfo
28388    section and emit the define/undef entries within it.  */
28389 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28390 switch (ref->code)
28391 {
28392 case 0:
28393 continue;
28394 case DW_MACRO_import:
28395 {
28396 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28397 tree comdat_key = get_identifier (ref->info);
28398 /* Terminate the previous .debug_macinfo section. */
28399 dw2_asm_output_data (1, 0, "End compilation unit");
28400 targetm.asm_out.named_section (debug_macinfo_section_name,
28401 SECTION_DEBUG
28402 | SECTION_LINKONCE
28403 | (early_lto_debug
28404 ? SECTION_EXCLUDE : 0),
28405 comdat_key);
28406 ASM_GENERATE_INTERNAL_LABEL (label,
28407 DEBUG_MACRO_SECTION_LABEL,
28408 ref->lineno + macinfo_label_base);
28409 ASM_OUTPUT_LABEL (asm_out_file, label);
28410 ref->code = 0;
28411 ref->info = NULL;
28412 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28413 "DWARF macro version number");
28414 if (DWARF_OFFSET_SIZE == 8)
28415 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28416 else
28417 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28418 }
28419 break;
28420 case DW_MACINFO_define:
28421 case DW_MACINFO_undef:
28422 output_macinfo_op (ref);
28423 ref->code = 0;
28424 ref->info = NULL;
28425 break;
28426 default:
28427 gcc_unreachable ();
28428 }
28429
28430 macinfo_label_base += macinfo_label_base_adj;
28431 }
28432
28433 /* Initialize the various sections and labels for dwarf output; if
28434    EARLY_LTO_DEBUG is set, use the early LTO debug sections.  Returns the
28435    generation (zero-based number of times the function was called).  */
28436
28437 static unsigned
28438 init_sections_and_labels (bool early_lto_debug)
28439 {
28440 /* As we may get called multiple times have a generation count for
28441 labels. */
28442 static unsigned generation = 0;
28443
28444 if (early_lto_debug)
28445 {
28446 if (!dwarf_split_debug_info)
28447 {
28448 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28449 SECTION_DEBUG | SECTION_EXCLUDE,
28450 NULL);
28451 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28452 SECTION_DEBUG | SECTION_EXCLUDE,
28453 NULL);
28454 debug_macinfo_section_name
28455 = ((dwarf_strict && dwarf_version < 5)
28456 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28457 debug_macinfo_section = get_section (debug_macinfo_section_name,
28458 SECTION_DEBUG
28459 | SECTION_EXCLUDE, NULL);
28460 }
28461 else
28462 {
28463 /* ??? Which of the following do we need early? */
28464 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28465 SECTION_DEBUG | SECTION_EXCLUDE,
28466 NULL);
28467 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28468 SECTION_DEBUG | SECTION_EXCLUDE,
28469 NULL);
28470 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28471 SECTION_DEBUG
28472 | SECTION_EXCLUDE, NULL);
28473 debug_skeleton_abbrev_section
28474 = get_section (DEBUG_LTO_ABBREV_SECTION,
28475 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28476 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28477 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28478 generation);
28479
28480 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28481 stay in the main .o, but the skeleton_line goes into the split
28482 off dwo. */
28483 debug_skeleton_line_section
28484 = get_section (DEBUG_LTO_LINE_SECTION,
28485 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28486 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28487 DEBUG_SKELETON_LINE_SECTION_LABEL,
28488 generation);
28489 debug_str_offsets_section
28490 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28491 SECTION_DEBUG | SECTION_EXCLUDE,
28492 NULL);
28493 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28494 DEBUG_SKELETON_INFO_SECTION_LABEL,
28495 generation);
28496 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28497 DEBUG_STR_DWO_SECTION_FLAGS,
28498 NULL);
28499 debug_macinfo_section_name
28500 = ((dwarf_strict && dwarf_version < 5)
28501 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28502 debug_macinfo_section = get_section (debug_macinfo_section_name,
28503 SECTION_DEBUG | SECTION_EXCLUDE,
28504 NULL);
28505 }
28506 /* For macro info and the file table we have to refer to a
28507 debug_line section. */
28508 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28509 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28510 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28511 DEBUG_LINE_SECTION_LABEL, generation);
28512
28513 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28514 DEBUG_STR_SECTION_FLAGS
28515 | SECTION_EXCLUDE, NULL);
28516 if (!dwarf_split_debug_info && !dwarf2out_as_loc_support)
28517 debug_line_str_section
28518 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28519 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28520 }
28521 else
28522 {
28523 if (!dwarf_split_debug_info)
28524 {
28525 debug_info_section = get_section (DEBUG_INFO_SECTION,
28526 SECTION_DEBUG, NULL);
28527 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28528 SECTION_DEBUG, NULL);
28529 debug_loc_section = get_section (dwarf_version >= 5
28530 ? DEBUG_LOCLISTS_SECTION
28531 : DEBUG_LOC_SECTION,
28532 SECTION_DEBUG, NULL);
28533 debug_macinfo_section_name
28534 = ((dwarf_strict && dwarf_version < 5)
28535 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28536 debug_macinfo_section = get_section (debug_macinfo_section_name,
28537 SECTION_DEBUG, NULL);
28538 }
28539 else
28540 {
28541 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28542 SECTION_DEBUG | SECTION_EXCLUDE,
28543 NULL);
28544 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28545 SECTION_DEBUG | SECTION_EXCLUDE,
28546 NULL);
28547 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28548 SECTION_DEBUG, NULL);
28549 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28550 SECTION_DEBUG, NULL);
28551 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28552 SECTION_DEBUG, NULL);
28553 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28554 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28555 generation);
28556
28557 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28558 stay in the main .o, but the skeleton_line goes into the
28559 split off dwo. */
28560 debug_skeleton_line_section
28561 = get_section (DEBUG_DWO_LINE_SECTION,
28562 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28563 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28564 DEBUG_SKELETON_LINE_SECTION_LABEL,
28565 generation);
28566 debug_str_offsets_section
28567 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28568 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28569 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28570 DEBUG_SKELETON_INFO_SECTION_LABEL,
28571 generation);
28572 debug_loc_section = get_section (dwarf_version >= 5
28573 ? DEBUG_DWO_LOCLISTS_SECTION
28574 : DEBUG_DWO_LOC_SECTION,
28575 SECTION_DEBUG | SECTION_EXCLUDE,
28576 NULL);
28577 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28578 DEBUG_STR_DWO_SECTION_FLAGS,
28579 NULL);
28580 debug_macinfo_section_name
28581 = ((dwarf_strict && dwarf_version < 5)
28582 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28583 debug_macinfo_section = get_section (debug_macinfo_section_name,
28584 SECTION_DEBUG | SECTION_EXCLUDE,
28585 NULL);
28586 }
28587 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28588 SECTION_DEBUG, NULL);
28589 debug_line_section = get_section (DEBUG_LINE_SECTION,
28590 SECTION_DEBUG, NULL);
28591 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28592 SECTION_DEBUG, NULL);
28593 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28594 SECTION_DEBUG, NULL);
28595 debug_str_section = get_section (DEBUG_STR_SECTION,
28596 DEBUG_STR_SECTION_FLAGS, NULL);
28597 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28598 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28599 DEBUG_STR_SECTION_FLAGS, NULL);
28600
28601 debug_ranges_section = get_section (dwarf_version >= 5
28602 ? DEBUG_RNGLISTS_SECTION
28603 : DEBUG_RANGES_SECTION,
28604 SECTION_DEBUG, NULL);
28605 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28606 SECTION_DEBUG, NULL);
28607 }
28608
28609 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28610 DEBUG_ABBREV_SECTION_LABEL, generation);
28611 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28612 DEBUG_INFO_SECTION_LABEL, generation);
28613 info_section_emitted = false;
28614 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28615 DEBUG_LINE_SECTION_LABEL, generation);
28616 /* There are up to 4 unique ranges labels per generation.
28617 See also output_rnglists. */
28618 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28619 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28620 if (dwarf_version >= 5 && dwarf_split_debug_info)
28621 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28622 DEBUG_RANGES_SECTION_LABEL,
28623 1 + generation * 4);
28624 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28625 DEBUG_ADDR_SECTION_LABEL, generation);
28626 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28627 (dwarf_strict && dwarf_version < 5)
28628 ? DEBUG_MACINFO_SECTION_LABEL
28629 : DEBUG_MACRO_SECTION_LABEL, generation);
28630 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28631 generation);
28632
28633 ++generation;
28634 return generation - 1;
28635 }
28636
28637 /* Set up for Dwarf output at the start of compilation. */
28638
28639 static void
28640 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28641 {
28642 /* Allocate the file_table. */
28643 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28644
28645 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28646 /* Allocate the decl_die_table. */
28647 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28648
28649 /* Allocate the decl_loc_table. */
28650 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28651
28652 /* Allocate the cached_dw_loc_list_table. */
28653 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28654
28655 /* Allocate the initial hunk of the decl_scope_table. */
28656 vec_alloc (decl_scope_table, 256);
28657
28658 /* Allocate the initial hunk of the abbrev_die_table. */
28659 vec_alloc (abbrev_die_table, 256);
28660 /* Zero-th entry is allocated, but unused. */
28661 abbrev_die_table->quick_push (NULL);
28662
28663 /* Allocate the dwarf_proc_stack_usage_map. */
28664 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28665
28666 /* Allocate the pubtypes and pubnames vectors. */
28667 vec_alloc (pubname_table, 32);
28668 vec_alloc (pubtype_table, 32);
28669
28670 vec_alloc (incomplete_types, 64);
28671
28672 vec_alloc (used_rtx_array, 32);
28673
28674 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28675 vec_alloc (macinfo_table, 64);
28676 #endif
28677
28678 /* If front-ends already registered a main translation unit but we were not
28679 ready to perform the association, do this now. */
28680 if (main_translation_unit != NULL_TREE)
28681 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28682 }
28683
28684 /* Called before compile () starts outputting functions, variables
28685 and toplevel asms into assembly. */
28686
28687 static void
28688 dwarf2out_assembly_start (void)
28689 {
28690 if (text_section_line_info)
28691 return;
28692
28693 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28694 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28695 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28696 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28697 COLD_TEXT_SECTION_LABEL, 0);
28698 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28699
28700 switch_to_section (text_section);
28701 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28702 #endif
28703
28704 /* Make sure the line number table for .text always exists. */
28705 text_section_line_info = new_line_info_table ();
28706 text_section_line_info->end_label = text_end_label;
28707
28708 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28709 cur_line_info_table = text_section_line_info;
28710 #endif
28711
28712 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28713 && dwarf2out_do_cfi_asm ()
28714 && !dwarf2out_do_eh_frame ())
28715 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28716 }
28717
28718 /* A helper function for dwarf2out_finish called through
28719 htab_traverse. Assign a string its index. All strings must be
28720 collected into the table by the time index_string is called,
28721 because the indexing code relies on htab_traverse to traverse nodes
28722 in the same order for each run. */
28723
28724 int
28725 index_string (indirect_string_node **h, unsigned int *index)
28726 {
28727 indirect_string_node *node = *h;
28728
28729 find_string_form (node);
28730 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28731 {
28732 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28733 node->index = *index;
28734 *index += 1;
28735 }
28736 return 1;
28737 }
28738
28739 /* A helper function for output_indirect_strings called through
28740 htab_traverse. Output the offset to a string and update the
28741 current offset. */
28742
28743 int
28744 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28745 {
28746 indirect_string_node *node = *h;
28747
28748 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28749 {
28750 /* Assert that this node has been assigned an index. */
28751 gcc_assert (node->index != NO_INDEX_ASSIGNED
28752 && node->index != NOT_INDEXED);
28753 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28754 "indexed string 0x%x: %s", node->index, node->str);
28755 *offset += strlen (node->str) + 1;
28756 }
28757 return 1;
28758 }
28759
28760 /* A helper function for dwarf2out_finish called through
28761 htab_traverse. Output the indexed string. */
28762
28763 int
28764 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28765 {
28766 struct indirect_string_node *node = *h;
28767
28768 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28769 {
28770 /* Assert that the strings are output in the same order as their
28771 indexes were assigned. */
28772 gcc_assert (*cur_idx == node->index);
28773 assemble_string (node->str, strlen (node->str) + 1);
28774 *cur_idx += 1;
28775 }
28776 return 1;
28777 }
28778
28779 /* A helper function for output_indirect_strings. Counts the number
28780 of indexed string offsets.  Must match the logic of the functions
28781 output_index_string[_offsets] above. */
28782 int
28783 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28784 {
28785 struct indirect_string_node *node = *h;
28786
28787 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28788 *last_idx += 1;
28789 return 1;
28790 }
28791
28792 /* A helper function for dwarf2out_finish called through
28793 htab_traverse. Emit one queued .debug_str string. */
28794
28795 int
28796 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28797 {
28798 struct indirect_string_node *node = *h;
28799
28800 node->form = find_string_form (node);
28801 if (node->form == form && node->refcount > 0)
28802 {
28803 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28804 assemble_string (node->str, strlen (node->str) + 1);
28805 }
28806
28807 return 1;
28808 }
28809
28810 /* Output the indexed string table. */
28811
28812 static void
28813 output_indirect_strings (void)
28814 {
28815 switch_to_section (debug_str_section);
28816 if (!dwarf_split_debug_info)
28817 debug_str_hash->traverse<enum dwarf_form,
28818 output_indirect_string> (DW_FORM_strp);
28819 else
28820 {
28821 unsigned int offset = 0;
28822 unsigned int cur_idx = 0;
28823
28824 if (skeleton_debug_str_hash)
28825 skeleton_debug_str_hash->traverse<enum dwarf_form,
28826 output_indirect_string> (DW_FORM_strp);
28827
28828 switch_to_section (debug_str_offsets_section);
28829 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28830 header.  Note that we don't need to generate a label for the
28831 actual index table following the header here, because this is
28832 for the split dwarf case only.  In a .dwo file there is only
28833 one string offsets table (and one debug info section).  But
28834 if we were to start using string offset tables for the main (or
28835 skeleton) unit, then we would have to add a DW_AT_str_offsets_base
28836 pointing to the actual index after the header. Split dwarf
28837 units will never have a string offsets base attribute. When
28838 a split unit is moved into a .dwp file the string offsets can
28839 be found through the .debug_cu_index section table. */
28840 if (dwarf_version >= 5)
28841 {
28842 unsigned int last_idx = 0;
28843 unsigned long str_offsets_length;
28844
28845 debug_str_hash->traverse_noresize
28846 <unsigned int *, count_index_strings> (&last_idx);
28847 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
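	  /* The extra 4 bytes cover the 2-byte version and 2-byte padding
	     fields emitted after the length field below.  */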
28848 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28849 dw2_asm_output_data (4, 0xffffffff,
28850 "Escape value for 64-bit DWARF extension");
28851 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28852 "Length of string offsets unit");
28853 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28854 dw2_asm_output_data (2, 0, "Header zero padding");
28855 }
28856 debug_str_hash->traverse_noresize
28857 <unsigned int *, output_index_string_offset> (&offset);
28858 switch_to_section (debug_str_dwo_section);
28859 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28860 (&cur_idx);
28861 }
28862 }
28863
28864 /* Callback for htab_traverse to assign an index to an entry in the
28865 table, and to write that entry to the .debug_addr section. */
28866
28867 int
28868 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28869 {
28870 addr_table_entry *entry = *slot;
28871
28872 if (entry->refcount == 0)
28873 {
28874 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28875 || entry->index == NOT_INDEXED);
28876 return 1;
28877 }
28878
28879 gcc_assert (entry->index == *cur_index);
28880 (*cur_index)++;
28881
28882 switch (entry->kind)
28883 {
28884 case ate_kind_rtx:
28885 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28886 "0x%x", entry->index);
28887 break;
28888 case ate_kind_rtx_dtprel:
28889 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28890 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28891 DWARF2_ADDR_SIZE,
28892 entry->addr.rtl);
28893 fputc ('\n', asm_out_file);
28894 break;
28895 case ate_kind_label:
28896 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28897 "0x%x", entry->index);
28898 break;
28899 default:
28900 gcc_unreachable ();
28901 }
28902 return 1;
28903 }
28904
28905 /* A helper function for dwarf2out_finish. Counts the number
28906 of indexed addresses.  Must match the logic of the function
28907 output_addr_table_entry above. */
28908 int
28909 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28910 {
28911 addr_table_entry *entry = *slot;
28912
28913 if (entry->refcount > 0)
28914 *last_idx += 1;
28915 return 1;
28916 }
28917
28918 /* Produce the .debug_addr section. */
28919
28920 static void
28921 output_addr_table (void)
28922 {
28923 unsigned int index = 0;
28924 if (addr_index_table == NULL || addr_index_table->size () == 0)
28925 return;
28926
28927 switch_to_section (debug_addr_section);
28928 addr_index_table
28929 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28930 }
28931
28932 #if ENABLE_ASSERT_CHECKING
28933 /* Verify that all marks are clear. */
28934
28935 static void
28936 verify_marks_clear (dw_die_ref die)
28937 {
28938 dw_die_ref c;
28939
28940 gcc_assert (! die->die_mark);
28941 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28942 }
28943 #endif /* ENABLE_ASSERT_CHECKING */
28944
28945 /* Clear the marks for a die and its children.
28946 Be cool if the mark isn't set. */
28947
28948 static void
28949 prune_unmark_dies (dw_die_ref die)
28950 {
28951 dw_die_ref c;
28952
28953 if (die->die_mark)
28954 die->die_mark = 0;
28955 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28956 }
28957
28958 /* Given LOC that is referenced by a DIE we're marking as used, find all
28959 DWARF procedures it references and mark them as used.  */
28960
28961 static void
28962 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28963 {
28964 for (; loc != NULL; loc = loc->dw_loc_next)
28965 switch (loc->dw_loc_opc)
28966 {
28967 case DW_OP_implicit_pointer:
28968 case DW_OP_convert:
28969 case DW_OP_reinterpret:
28970 case DW_OP_GNU_implicit_pointer:
28971 case DW_OP_GNU_convert:
28972 case DW_OP_GNU_reinterpret:
28973 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28974 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28975 break;
28976 case DW_OP_GNU_variable_value:
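      /* A DW_OP_GNU_variable_value operand may still be a declaration
	 reference; if its DIE exists by now, rewrite the operand into a
	 direct DIE reference so it can be marked like the cases below.  */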
28977 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28978 {
28979 dw_die_ref ref
28980 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28981 if (ref == NULL)
28982 break;
28983 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28984 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28985 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28986 }
28987 /* FALLTHRU */
28988 case DW_OP_call2:
28989 case DW_OP_call4:
28990 case DW_OP_call_ref:
28991 case DW_OP_const_type:
28992 case DW_OP_GNU_const_type:
28993 case DW_OP_GNU_parameter_ref:
28994 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
28995 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28996 break;
28997 case DW_OP_regval_type:
28998 case DW_OP_deref_type:
28999 case DW_OP_GNU_regval_type:
29000 case DW_OP_GNU_deref_type:
29001 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29002 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29003 break;
29004 case DW_OP_entry_value:
29005 case DW_OP_GNU_entry_value:
29006 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29007 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29008 break;
29009 default:
29010 break;
29011 }
29012 }
29013
29014 /* Given DIE that we're marking as used, find any other dies
29015 it references as attributes and mark them as used. */
29016
29017 static void
29018 prune_unused_types_walk_attribs (dw_die_ref die)
29019 {
29020 dw_attr_node *a;
29021 unsigned ix;
29022
29023 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29024 {
29025 switch (AT_class (a))
29026 {
29027 /* Make sure DWARF procedures referenced by location descriptions will
29028 get emitted. */
29029 case dw_val_class_loc:
29030 prune_unused_types_walk_loc_descr (AT_loc (a));
29031 break;
29032 case dw_val_class_loc_list:
29033 for (dw_loc_list_ref list = AT_loc_list (a);
29034 list != NULL;
29035 list = list->dw_loc_next)
29036 prune_unused_types_walk_loc_descr (list->expr);
29037 break;
29038
29039 case dw_val_class_view_list:
29040 /* This points to a loc_list in another attribute, so it's
29041 already covered. */
29042 break;
29043
29044 case dw_val_class_die_ref:
29045 /* A reference to another DIE.
29046 Make sure that it will get emitted.
29047 If it was broken out into a comdat group, don't follow it. */
29048 if (! AT_ref (a)->comdat_type_p
29049 || a->dw_attr == DW_AT_specification)
29050 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29051 break;
29052
29053 case dw_val_class_str:
29054 /* Set the string's refcount to 0 so that prune_unused_types_mark
29055 accounts properly for it. */
29056 a->dw_attr_val.v.val_str->refcount = 0;
29057 break;
29058
29059 default:
29060 break;
29061 }
29062 }
29063 }
29064
29065 /* Mark the child DIEs of DIE that describe its generic parameters and arguments. */
29066
29067 static void
29068 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29069 {
29070 dw_die_ref c;
29071
29072 if (die == NULL || die->die_child == NULL)
29073 return;
29074 c = die->die_child;
29075 do
29076 {
29077 if (is_template_parameter (c))
29078 prune_unused_types_mark (c, 1);
29079 c = c->die_sib;
29080 } while (c && c != die->die_child);
29081 }
29082
29083 /* Mark DIE as being used. If DOKIDS is true, then walk down
29084 to DIE's children. */
29085
29086 static void
29087 prune_unused_types_mark (dw_die_ref die, int dokids)
29088 {
29089 dw_die_ref c;
29090
29091 if (die->die_mark == 0)
29092 {
29093 /* We haven't done this node yet. Mark it as used. */
29094 die->die_mark = 1;
29095 /* If this is the DIE of a generic type instantiation,
29096 mark the children DIEs that describe its generic parms and
29097 args. */
29098 prune_unused_types_mark_generic_parms_dies (die);
29099
29100 /* We also have to mark its parents as used.
29101 (But we don't want to mark our parent's kids due to this,
29102 unless it is a class.) */
29103 if (die->die_parent)
29104 prune_unused_types_mark (die->die_parent,
29105 class_scope_p (die->die_parent));
29106
29107 /* Mark any referenced nodes. */
29108 prune_unused_types_walk_attribs (die);
29109
29110 /* If this node is a specification,
29111 also mark the definition, if it exists. */
29112 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29113 prune_unused_types_mark (die->die_definition, 1);
29114 }
29115
29116 if (dokids && die->die_mark != 2)
29117 {
29118 /* We need to walk the children, but haven't done so yet.
29119 Remember that we've walked the kids. */
29120 die->die_mark = 2;
29121
29122 /* If this is an array type, we need to make sure our
29123 kids get marked, even if they're types. If we're
29124 breaking out types into comdat sections, do this
29125 for all type definitions. */
29126 if (die->die_tag == DW_TAG_array_type
29127 || (use_debug_types
29128 && is_type_die (die) && ! is_declaration_die (die)))
29129 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29130 else
29131 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29132 }
29133 }
29134
29135 /* For local classes, check whether any static member functions were emitted
29136 and, if so, mark them. */
29137
29138 static void
29139 prune_unused_types_walk_local_classes (dw_die_ref die)
29140 {
29141 dw_die_ref c;
29142
29143 if (die->die_mark == 2)
29144 return;
29145
29146 switch (die->die_tag)
29147 {
29148 case DW_TAG_structure_type:
29149 case DW_TAG_union_type:
29150 case DW_TAG_class_type:
29151 break;
29152
29153 case DW_TAG_subprogram:
29154 if (!get_AT_flag (die, DW_AT_declaration)
29155 || die->die_definition != NULL)
29156 prune_unused_types_mark (die, 1);
29157 return;
29158
29159 default:
29160 return;
29161 }
29162
29163 /* Mark children. */
29164 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29165 }
29166
29167 /* Walk the tree DIE and mark types that we actually use. */
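/* Overview: type DIEs and DWARF procedures are normally not marked here;
   they only become marked when some other DIE refers to them via
   prune_unused_types_walk_attribs.  Everything else is marked and its
   children are walked.  */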
29168
29169 static void
29170 prune_unused_types_walk (dw_die_ref die)
29171 {
29172 dw_die_ref c;
29173
29174 /* Don't do anything if this node is already marked and
29175 children have been marked as well. */
29176 if (die->die_mark == 2)
29177 return;
29178
29179 switch (die->die_tag)
29180 {
29181 case DW_TAG_structure_type:
29182 case DW_TAG_union_type:
29183 case DW_TAG_class_type:
29184 if (die->die_perennial_p)
29185 break;
29186
29187 for (c = die->die_parent; c; c = c->die_parent)
29188 if (c->die_tag == DW_TAG_subprogram)
29189 break;
29190
29191 /* Finding used static member functions inside of classes
29192 is needed just for local classes, because for other classes
29193 static member function DIEs with DW_AT_specification
29194 are emitted outside of the DW_TAG_*_type. If we ever change
29195 it, we'd need to call this even for non-local classes. */
29196 if (c)
29197 prune_unused_types_walk_local_classes (die);
29198
29199 /* It's a type node --- don't mark it. */
29200 return;
29201
29202 case DW_TAG_const_type:
29203 case DW_TAG_packed_type:
29204 case DW_TAG_pointer_type:
29205 case DW_TAG_reference_type:
29206 case DW_TAG_rvalue_reference_type:
29207 case DW_TAG_volatile_type:
29208 case DW_TAG_typedef:
29209 case DW_TAG_array_type:
29210 case DW_TAG_interface_type:
29211 case DW_TAG_friend:
29212 case DW_TAG_enumeration_type:
29213 case DW_TAG_subroutine_type:
29214 case DW_TAG_string_type:
29215 case DW_TAG_set_type:
29216 case DW_TAG_subrange_type:
29217 case DW_TAG_ptr_to_member_type:
29218 case DW_TAG_file_type:
29219 /* Type nodes are useful only when other DIEs reference them --- don't
29220 mark them. */
29221 /* FALLTHROUGH */
29222
29223 case DW_TAG_dwarf_procedure:
29224 /* Likewise for DWARF procedures. */
29225
29226 if (die->die_perennial_p)
29227 break;
29228
29229 return;
29230
29231 default:
29232 /* Mark everything else. */
29233 break;
29234 }
29235
29236 if (die->die_mark == 0)
29237 {
29238 die->die_mark = 1;
29239
29240 /* Now, mark any dies referenced from here. */
29241 prune_unused_types_walk_attribs (die);
29242 }
29243
29244 die->die_mark = 2;
29245
29246 /* Mark children. */
29247 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29248 }
29249
29250 /* Increment the string counts on strings referred to from DIE's
29251 attributes. */
29252
29253 static void
29254 prune_unused_types_update_strings (dw_die_ref die)
29255 {
29256 dw_attr_node *a;
29257 unsigned ix;
29258
29259 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29260 if (AT_class (a) == dw_val_class_str)
29261 {
29262 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29263 s->refcount++;
29264 /* Avoid unnecessarily putting strings that are used less than
29265 twice in the hash table. */
29266 if (s->refcount
29267 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29268 {
29269 indirect_string_node **slot
29270 = debug_str_hash->find_slot_with_hash (s->str,
29271 htab_hash_string (s->str),
29272 INSERT);
29273 gcc_assert (*slot == NULL);
29274 *slot = s;
29275 }
29276 }
29277 }
29278
29279 /* Mark DIE and its children as removed. */
29280
29281 static void
29282 mark_removed (dw_die_ref die)
29283 {
29284 dw_die_ref c;
29285 die->removed = true;
29286 FOR_EACH_CHILD (die, c, mark_removed (c));
29287 }
29288
29289 /* Remove from the tree DIE any dies that aren't marked. */
29290
29291 static void
29292 prune_unused_types_prune (dw_die_ref die)
29293 {
29294 dw_die_ref c;
29295
29296 gcc_assert (die->die_mark);
29297 prune_unused_types_update_strings (die);
29298
29299 if (! die->die_child)
29300 return;
29301
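/* DIE->die_child points at the last child and the children are linked in
   a circular list through die_sib.  Walk that list, splicing out unmarked
   children (marking their subtrees as removed) and recursing into the
   marked ones.  */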
29302 c = die->die_child;
29303 do {
29304 dw_die_ref prev = c, next;
29305 for (c = c->die_sib; ! c->die_mark; c = next)
29306 if (c == die->die_child)
29307 {
29308 /* No marked children between 'prev' and the end of the list. */
29309 if (prev == c)
29310 /* No marked children at all. */
29311 die->die_child = NULL;
29312 else
29313 {
29314 prev->die_sib = c->die_sib;
29315 die->die_child = prev;
29316 }
29317 c->die_sib = NULL;
29318 mark_removed (c);
29319 return;
29320 }
29321 else
29322 {
29323 next = c->die_sib;
29324 c->die_sib = NULL;
29325 mark_removed (c);
29326 }
29327
29328 if (c != prev->die_sib)
29329 prev->die_sib = c;
29330 prune_unused_types_prune (c);
29331 } while (c != die->die_child);
29332 }
29333
29334 /* Remove dies representing declarations that we never use. */
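/* This is a mark-and-sweep pass over the DIE trees: verify that all marks
   start out clear, premark types used by global variables, walk the
   compilation unit, limbo and comdat type trees setting marks on DIEs
   that are really needed, prune everything left unmarked, and finally
   clear the marks again.  */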
29335
29336 static void
29337 prune_unused_types (void)
29338 {
29339 unsigned int i;
29340 limbo_die_node *node;
29341 comdat_type_node *ctnode;
29342 pubname_entry *pub;
29343 dw_die_ref base_type;
29344
29345 #if ENABLE_ASSERT_CHECKING
29346 /* All the marks should already be clear. */
29347 verify_marks_clear (comp_unit_die ());
29348 for (node = limbo_die_list; node; node = node->next)
29349 verify_marks_clear (node->die);
29350 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29351 verify_marks_clear (ctnode->root_die);
29352 #endif /* ENABLE_ASSERT_CHECKING */
29353
29354 /* Mark types that are used in global variables. */
29355 premark_types_used_by_global_vars ();
29356
29357 /* Set the mark on nodes that are actually used. */
29358 prune_unused_types_walk (comp_unit_die ());
29359 for (node = limbo_die_list; node; node = node->next)
29360 prune_unused_types_walk (node->die);
29361 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29362 {
29363 prune_unused_types_walk (ctnode->root_die);
29364 prune_unused_types_mark (ctnode->type_die, 1);
29365 }
29366
29367 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29368 are unusual in that they are pubnames that are the children of pubtypes.
29369 They should only be marked via their parent DW_TAG_enumeration_type die,
29370 not as roots in themselves. */
29371 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29372 if (pub->die->die_tag != DW_TAG_enumerator)
29373 prune_unused_types_mark (pub->die, 1);
29374 for (i = 0; base_types.iterate (i, &base_type); i++)
29375 prune_unused_types_mark (base_type, 1);
29376
29377 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29378 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29379 callees). */
29380 cgraph_node *cnode;
29381 FOR_EACH_FUNCTION (cnode)
29382 if (cnode->referred_to_p (false))
29383 {
29384 dw_die_ref die = lookup_decl_die (cnode->decl);
29385 if (die == NULL || die->die_mark)
29386 continue;
29387 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29388 if (e->caller != cnode
29389 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29390 {
29391 prune_unused_types_mark (die, 1);
29392 break;
29393 }
29394 }
29395
29396 if (debug_str_hash)
29397 debug_str_hash->empty ();
29398 if (skeleton_debug_str_hash)
29399 skeleton_debug_str_hash->empty ();
29400 prune_unused_types_prune (comp_unit_die ());
29401 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29402 {
29403 node = *pnode;
29404 if (!node->die->die_mark)
29405 *pnode = node->next;
29406 else
29407 {
29408 prune_unused_types_prune (node->die);
29409 pnode = &node->next;
29410 }
29411 }
29412 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29413 prune_unused_types_prune (ctnode->root_die);
29414
29415 /* Leave the marks clear. */
29416 prune_unmark_dies (comp_unit_die ());
29417 for (node = limbo_die_list; node; node = node->next)
29418 prune_unmark_dies (node->die);
29419 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29420 prune_unmark_dies (ctnode->root_die);
29421 }
29422
29423 /* Helpers to manipulate hash table of comdat type units. */
29424
29425 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29426 {
29427 static inline hashval_t hash (const comdat_type_node *);
29428 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29429 };
29430
29431 inline hashval_t
29432 comdat_type_hasher::hash (const comdat_type_node *type_node)
29433 {
29434 hashval_t h;
29435 memcpy (&h, type_node->signature, sizeof (h));
29436 return h;
29437 }
29438
29439 inline bool
29440 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29441 const comdat_type_node *type_node_2)
29442 {
29443 return (! memcmp (type_node_1->signature, type_node_2->signature,
29444 DWARF_TYPE_SIGNATURE_SIZE));
29445 }
29446
29447 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29448 to the location where it would have been added had we known its
29449 DECL_ASSEMBLER_NAME when we added the other attributes. This will
29450 probably improve compactness of debug info, removing equivalent
29451 abbrevs, and hide any differences caused by deferring the
29452 computation of the assembler name, triggered by e.g. PCH. */
29453
29454 static inline void
29455 move_linkage_attr (dw_die_ref die)
29456 {
29457 unsigned ix = vec_safe_length (die->die_attr);
29458 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29459
29460 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29461 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29462
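/* Scan backwards for the attribute after which the linkage name would
   have been added originally, i.e. just after the name or source
   coordinate attributes.  */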
29463 while (--ix > 0)
29464 {
29465 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29466
29467 if (prev->dw_attr == DW_AT_decl_line
29468 || prev->dw_attr == DW_AT_decl_column
29469 || prev->dw_attr == DW_AT_name)
29470 break;
29471 }
29472
29473 if (ix != vec_safe_length (die->die_attr) - 1)
29474 {
29475 die->die_attr->pop ();
29476 die->die_attr->quick_insert (ix, linkage);
29477 }
29478 }
29479
29480 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29481 referenced from typed stack ops and count how often they are used. */
29482
29483 static void
29484 mark_base_types (dw_loc_descr_ref loc)
29485 {
29486 dw_die_ref base_type = NULL;
29487
29488 for (; loc; loc = loc->dw_loc_next)
29489 {
29490 switch (loc->dw_loc_opc)
29491 {
29492 case DW_OP_regval_type:
29493 case DW_OP_deref_type:
29494 case DW_OP_GNU_regval_type:
29495 case DW_OP_GNU_deref_type:
29496 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29497 break;
29498 case DW_OP_convert:
29499 case DW_OP_reinterpret:
29500 case DW_OP_GNU_convert:
29501 case DW_OP_GNU_reinterpret:
29502 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29503 continue;
29504 /* FALLTHRU */
29505 case DW_OP_const_type:
29506 case DW_OP_GNU_const_type:
29507 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29508 break;
29509 case DW_OP_entry_value:
29510 case DW_OP_GNU_entry_value:
29511 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29512 continue;
29513 default:
29514 continue;
29515 }
29516 gcc_assert (base_type->die_parent == comp_unit_die ());
29517 if (base_type->die_mark)
29518 base_type->die_mark++;
29519 else
29520 {
29521 base_types.safe_push (base_type);
29522 base_type->die_mark = 1;
29523 }
29524 }
29525 }
29526
29527 /* Comparison function for sorting marked base types. */
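/* The primary key is decreasing usage count (die_mark), so the most
   frequently referenced base types sort first and end up with the
   smallest DIE offsets; ties are broken by byte size, encoding and
   alignment.  */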
29528
29529 static int
29530 base_type_cmp (const void *x, const void *y)
29531 {
29532 dw_die_ref dx = *(const dw_die_ref *) x;
29533 dw_die_ref dy = *(const dw_die_ref *) y;
29534 unsigned int byte_size1, byte_size2;
29535 unsigned int encoding1, encoding2;
29536 unsigned int align1, align2;
29537 if (dx->die_mark > dy->die_mark)
29538 return -1;
29539 if (dx->die_mark < dy->die_mark)
29540 return 1;
29541 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29542 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29543 if (byte_size1 < byte_size2)
29544 return 1;
29545 if (byte_size1 > byte_size2)
29546 return -1;
29547 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29548 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29549 if (encoding1 < encoding2)
29550 return 1;
29551 if (encoding1 > encoding2)
29552 return -1;
29553 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29554 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29555 if (align1 < align2)
29556 return 1;
29557 if (align1 > align2)
29558 return -1;
29559 return 0;
29560 }
29561
29562 /* Move base types marked by mark_base_types as early as possible
29563 in the CU, sorted by decreasing usage count both to make the
29564 uleb128 references as small as possible and to make sure they
29565 will have die_offset already computed by calc_die_sizes when
29566 sizes of typed stack loc ops are computed. */
29567
29568 static void
29569 move_marked_base_types (void)
29570 {
29571 unsigned int i;
29572 dw_die_ref base_type, die, c;
29573
29574 if (base_types.is_empty ())
29575 return;
29576
29577 /* Sort by decreasing usage count, they will be added again in that
29578 order later on. */
29579 base_types.qsort (base_type_cmp);
29580 die = comp_unit_die ();
29581 c = die->die_child;
29582 do
29583 {
29584 dw_die_ref prev = c;
29585 c = c->die_sib;
29586 while (c->die_mark)
29587 {
29588 remove_child_with_prev (c, prev);
29589 /* As base types were marked, there must be at least
29590 one node other than DW_TAG_base_type. */
29591 gcc_assert (die->die_child != NULL);
29592 c = prev->die_sib;
29593 }
29594 }
29595 while (c != die->die_child);
29596 gcc_assert (die->die_child);
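/* Re-chain the sorted base types right after the last child; as
   die_child->die_sib is the first child, this places them at the front
   of the child list in decreasing usage order.  */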
29597 c = die->die_child;
29598 for (i = 0; base_types.iterate (i, &base_type); i++)
29599 {
29600 base_type->die_mark = 0;
29601 base_type->die_sib = c->die_sib;
29602 c->die_sib = base_type;
29603 c = base_type;
29604 }
29605 }
29606
29607 /* Helper function for resolve_addr; attempt to resolve
29608 one CONST_STRING and return true if successful. Similarly verify that
29609 SYMBOL_REFs refer to variables emitted in the current CU. */
29610
29611 static bool
29612 resolve_one_addr (rtx *addr)
29613 {
29614 rtx rtl = *addr;
29615
29616 if (GET_CODE (rtl) == CONST_STRING)
29617 {
29618 size_t len = strlen (XSTR (rtl, 0)) + 1;
29619 tree t = build_string (len, XSTR (rtl, 0));
29620 tree tlen = size_int (len - 1);
29621 TREE_TYPE (t)
29622 = build_array_type (char_type_node, build_index_type (tlen));
29623 rtl = lookup_constant_def (t);
29624 if (!rtl || !MEM_P (rtl))
29625 return false;
29626 rtl = XEXP (rtl, 0);
29627 if (GET_CODE (rtl) == SYMBOL_REF
29628 && SYMBOL_REF_DECL (rtl)
29629 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29630 return false;
29631 vec_safe_push (used_rtx_array, rtl);
29632 *addr = rtl;
29633 return true;
29634 }
29635
29636 if (GET_CODE (rtl) == SYMBOL_REF
29637 && SYMBOL_REF_DECL (rtl))
29638 {
29639 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29640 {
29641 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29642 return false;
29643 }
29644 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29645 return false;
29646 }
29647
29648 if (GET_CODE (rtl) == CONST)
29649 {
29650 subrtx_ptr_iterator::array_type array;
29651 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29652 if (!resolve_one_addr (*iter))
29653 return false;
29654 }
29655
29656 return true;
29657 }
29658
29659 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29660 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29661 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
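/* The DW_TAG_dwarf_procedure created below carries a DW_AT_location of
   DW_OP_implicit_value <length> <string bytes>, which is what a later
   DW_OP_implicit_pointer referencing this DIE will point into.  */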
29662
29663 static rtx
29664 string_cst_pool_decl (tree t)
29665 {
29666 rtx rtl = output_constant_def (t, 1);
29667 unsigned char *array;
29668 dw_loc_descr_ref l;
29669 tree decl;
29670 size_t len;
29671 dw_die_ref ref;
29672
29673 if (!rtl || !MEM_P (rtl))
29674 return NULL_RTX;
29675 rtl = XEXP (rtl, 0);
29676 if (GET_CODE (rtl) != SYMBOL_REF
29677 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29678 return NULL_RTX;
29679
29680 decl = SYMBOL_REF_DECL (rtl);
29681 if (!lookup_decl_die (decl))
29682 {
29683 len = TREE_STRING_LENGTH (t);
29684 vec_safe_push (used_rtx_array, rtl);
29685 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29686 array = ggc_vec_alloc<unsigned char> (len);
29687 memcpy (array, TREE_STRING_POINTER (t), len);
29688 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29689 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29690 l->dw_loc_oprnd2.v.val_vec.length = len;
29691 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29692 l->dw_loc_oprnd2.v.val_vec.array = array;
29693 add_AT_loc (ref, DW_AT_location, l);
29694 equate_decl_number_to_die (decl, ref);
29695 }
29696 return rtl;
29697 }
29698
29699 /* Helper function of resolve_addr_in_expr. LOC is
29700 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29701 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29702 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29703 with DW_OP_implicit_pointer if possible
29704 and return true; if unsuccessful, return false. */
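/* For example (a sketch; the operands are illustrative):
     DW_OP_addr <sym + off> DW_OP_stack_value
   becomes
     DW_OP_implicit_pointer <DIE of sym's decl> <off>
   provided that DIE has a DW_AT_location or DW_AT_const_value
   attribute.  */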
29705
29706 static bool
29707 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29708 {
29709 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29710 HOST_WIDE_INT offset = 0;
29711 dw_die_ref ref = NULL;
29712 tree decl;
29713
29714 if (GET_CODE (rtl) == CONST
29715 && GET_CODE (XEXP (rtl, 0)) == PLUS
29716 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29717 {
29718 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29719 rtl = XEXP (XEXP (rtl, 0), 0);
29720 }
29721 if (GET_CODE (rtl) == CONST_STRING)
29722 {
29723 size_t len = strlen (XSTR (rtl, 0)) + 1;
29724 tree t = build_string (len, XSTR (rtl, 0));
29725 tree tlen = size_int (len - 1);
29726
29727 TREE_TYPE (t)
29728 = build_array_type (char_type_node, build_index_type (tlen));
29729 rtl = string_cst_pool_decl (t);
29730 if (!rtl)
29731 return false;
29732 }
29733 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29734 {
29735 decl = SYMBOL_REF_DECL (rtl);
29736 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29737 {
29738 ref = lookup_decl_die (decl);
29739 if (ref && (get_AT (ref, DW_AT_location)
29740 || get_AT (ref, DW_AT_const_value)))
29741 {
29742 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29743 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29744 loc->dw_loc_oprnd1.val_entry = NULL;
29745 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29746 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29747 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29748 loc->dw_loc_oprnd2.v.val_int = offset;
29749 return true;
29750 }
29751 }
29752 }
29753 return false;
29754 }
29755
29756 /* Helper function for resolve_addr; handle one location
29757 expression and return false if at least one CONST_STRING or SYMBOL_REF in
29758 the location list couldn't be resolved. */
29759
29760 static bool
29761 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29762 {
29763 dw_loc_descr_ref keep = NULL;
29764 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29765 switch (loc->dw_loc_opc)
29766 {
29767 case DW_OP_addr:
29768 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29769 {
29770 if ((prev == NULL
29771 || prev->dw_loc_opc == DW_OP_piece
29772 || prev->dw_loc_opc == DW_OP_bit_piece)
29773 && loc->dw_loc_next
29774 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29775 && (!dwarf_strict || dwarf_version >= 5)
29776 && optimize_one_addr_into_implicit_ptr (loc))
29777 break;
29778 return false;
29779 }
29780 break;
29781 case DW_OP_GNU_addr_index:
29782 case DW_OP_addrx:
29783 case DW_OP_GNU_const_index:
29784 case DW_OP_constx:
29785 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29786 || loc->dw_loc_opc == DW_OP_addrx)
29787 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29788 || loc->dw_loc_opc == DW_OP_constx)
29789 && loc->dtprel))
29790 {
29791 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29792 if (!resolve_one_addr (&rtl))
29793 return false;
29794 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29795 loc->dw_loc_oprnd1.val_entry
29796 = add_addr_table_entry (rtl, ate_kind_rtx);
29797 }
29798 break;
29799 case DW_OP_const4u:
29800 case DW_OP_const8u:
29801 if (loc->dtprel
29802 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29803 return false;
29804 break;
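/* If a DW_OP_plus_uconst with a large operand would take more bytes than
   pushing the constant and adding with DW_OP_plus, rewrite it in place as
   the shorter form.  */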
29805 case DW_OP_plus_uconst:
29806 if (size_of_loc_descr (loc)
29807 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29808 + 1
29809 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29810 {
29811 dw_loc_descr_ref repl
29812 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29813 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29814 add_loc_descr (&repl, loc->dw_loc_next);
29815 *loc = *repl;
29816 }
29817 break;
29818 case DW_OP_implicit_value:
29819 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29820 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29821 return false;
29822 break;
29823 case DW_OP_implicit_pointer:
29824 case DW_OP_GNU_implicit_pointer:
29825 case DW_OP_GNU_parameter_ref:
29826 case DW_OP_GNU_variable_value:
29827 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29828 {
29829 dw_die_ref ref
29830 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29831 if (ref == NULL)
29832 return false;
29833 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29834 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29835 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29836 }
29837 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29838 {
29839 if (prev == NULL
29840 && loc->dw_loc_next == NULL
29841 && AT_class (a) == dw_val_class_loc)
29842 switch (a->dw_attr)
29843 {
29844 /* The following attributes allow both exprloc and reference,
29845 so if the whole expression is DW_OP_GNU_variable_value
29846 alone we can transform it into a reference. */
29847 case DW_AT_byte_size:
29848 case DW_AT_bit_size:
29849 case DW_AT_lower_bound:
29850 case DW_AT_upper_bound:
29851 case DW_AT_bit_stride:
29852 case DW_AT_count:
29853 case DW_AT_allocated:
29854 case DW_AT_associated:
29855 case DW_AT_byte_stride:
29856 a->dw_attr_val.val_class = dw_val_class_die_ref;
29857 a->dw_attr_val.val_entry = NULL;
29858 a->dw_attr_val.v.val_die_ref.die
29859 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29860 a->dw_attr_val.v.val_die_ref.external = 0;
29861 return true;
29862 default:
29863 break;
29864 }
29865 if (dwarf_strict)
29866 return false;
29867 }
29868 break;
29869 case DW_OP_const_type:
29870 case DW_OP_regval_type:
29871 case DW_OP_deref_type:
29872 case DW_OP_convert:
29873 case DW_OP_reinterpret:
29874 case DW_OP_GNU_const_type:
29875 case DW_OP_GNU_regval_type:
29876 case DW_OP_GNU_deref_type:
29877 case DW_OP_GNU_convert:
29878 case DW_OP_GNU_reinterpret:
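/* Fold consecutive conversions: when the next op is a DW_OP_convert to a
   base type of the same byte size and a compatible integer (or identical)
   encoding, retarget LOC's base type reference and drop the following
   conversion.  */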
29879 while (loc->dw_loc_next
29880 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29881 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29882 {
29883 dw_die_ref base1, base2;
29884 unsigned enc1, enc2, size1, size2;
29885 if (loc->dw_loc_opc == DW_OP_regval_type
29886 || loc->dw_loc_opc == DW_OP_deref_type
29887 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29888 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29889 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29890 else if (loc->dw_loc_oprnd1.val_class
29891 == dw_val_class_unsigned_const)
29892 break;
29893 else
29894 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29895 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29896 == dw_val_class_unsigned_const)
29897 break;
29898 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29899 gcc_assert (base1->die_tag == DW_TAG_base_type
29900 && base2->die_tag == DW_TAG_base_type);
29901 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29902 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29903 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29904 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29905 if (size1 == size2
29906 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29907 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29908 && loc != keep)
29909 || enc1 == enc2))
29910 {
29911 /* Optimize away next DW_OP_convert after
29912 adjusting LOC's base type die reference. */
29913 if (loc->dw_loc_opc == DW_OP_regval_type
29914 || loc->dw_loc_opc == DW_OP_deref_type
29915 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29916 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29917 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29918 else
29919 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29920 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29921 continue;
29922 }
29923 /* Don't change integer DW_OP_convert after e.g. floating
29924 point typed stack entry. */
29925 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29926 keep = loc->dw_loc_next;
29927 break;
29928 }
29929 break;
29930 default:
29931 break;
29932 }
29933 return true;
29934 }
29935
29936 /* Helper function of resolve_addr. DIE had DW_AT_location of
29937 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand
29938 and DW_OP_addr couldn't be resolved. resolve_addr has already
29939 removed the DW_AT_location attribute. This function attempts to
29940 add a new DW_AT_location attribute with DW_OP_implicit_pointer
29941 to it, or a DW_AT_const_value attribute, if possible. */
29942
29943 static void
29944 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29945 {
29946 if (!VAR_P (decl)
29947 || lookup_decl_die (decl) != die
29948 || DECL_EXTERNAL (decl)
29949 || !TREE_STATIC (decl)
29950 || DECL_INITIAL (decl) == NULL_TREE
29951 || DECL_P (DECL_INITIAL (decl))
29952 || get_AT (die, DW_AT_const_value))
29953 return;
29954
29955 tree init = DECL_INITIAL (decl);
29956 HOST_WIDE_INT offset = 0;
29957 /* For variables that have been optimized away and thus
29958 don't have a memory location, see if we can emit
29959 DW_AT_const_value instead. */
29960 if (tree_add_const_value_attribute (die, init))
29961 return;
29962 if (dwarf_strict && dwarf_version < 5)
29963 return;
29964 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29965 and ADDR_EXPR refers to a decl that has DW_AT_location or
29966 DW_AT_const_value (but isn't addressable, otherwise
29967 resolving the original DW_OP_addr wouldn't fail), see if
29968 we can add DW_OP_implicit_pointer. */
29969 STRIP_NOPS (init);
29970 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29971 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29972 {
29973 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29974 init = TREE_OPERAND (init, 0);
29975 STRIP_NOPS (init);
29976 }
29977 if (TREE_CODE (init) != ADDR_EXPR)
29978 return;
29979 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29980 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29981 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29982 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29983 && TREE_OPERAND (init, 0) != decl))
29984 {
29985 dw_die_ref ref;
29986 dw_loc_descr_ref l;
29987
29988 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29989 {
29990 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29991 if (!rtl)
29992 return;
29993 decl = SYMBOL_REF_DECL (rtl);
29994 }
29995 else
29996 decl = TREE_OPERAND (init, 0);
29997 ref = lookup_decl_die (decl);
29998 if (ref == NULL
29999 || (!get_AT (ref, DW_AT_location)
30000 && !get_AT (ref, DW_AT_const_value)))
30001 return;
30002 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30003 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30004 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30005 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30006 add_AT_loc (die, DW_AT_location, l);
30007 }
30008 }
30009
30010 /* Return NULL if L is a valid DWARF expression, otherwise return the first
30011 op that is not valid in a DWARF expression. */
30012
30013 static dw_loc_descr_ref
30014 non_dwarf_expression (dw_loc_descr_ref l)
30015 {
30016 while (l)
30017 {
30018 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30019 return l;
30020 switch (l->dw_loc_opc)
30021 {
30022 case DW_OP_regx:
30023 case DW_OP_implicit_value:
30024 case DW_OP_stack_value:
30025 case DW_OP_implicit_pointer:
30026 case DW_OP_GNU_implicit_pointer:
30027 case DW_OP_GNU_parameter_ref:
30028 case DW_OP_piece:
30029 case DW_OP_bit_piece:
30030 return l;
30031 default:
30032 break;
30033 }
30034 l = l->dw_loc_next;
30035 }
30036 return NULL;
30037 }
30038
30039 /* Return adjusted copy of EXPR:
30040 If it is an empty DWARF expression, return it.
30041 If it is a valid non-empty DWARF expression,
30042 return a copy of EXPR with DW_OP_deref appended to it.
30043 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
30044 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30045 If it is a DWARF expression followed by DW_OP_stack_value, return a
30046 copy of the DWARF expression without anything appended.
30047 Otherwise, return NULL. */
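/* A sketch with illustrative operands:
     DW_OP_fbreg <-16>                -->  DW_OP_fbreg <-16> DW_OP_deref
     DW_OP_reg3                       -->  DW_OP_breg3 <0>
     <expr> DW_OP_stack_value         -->  <expr>  */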
30048
30049 static dw_loc_descr_ref
30050 copy_deref_exprloc (dw_loc_descr_ref expr)
30051 {
30052 dw_loc_descr_ref tail = NULL;
30053
30054 if (expr == NULL)
30055 return NULL;
30056
30057 dw_loc_descr_ref l = non_dwarf_expression (expr);
30058 if (l && l->dw_loc_next)
30059 return NULL;
30060
30061 if (l)
30062 {
30063 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30064 tail = new_loc_descr ((enum dwarf_location_atom)
30065 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30066 0, 0);
30067 else
30068 switch (l->dw_loc_opc)
30069 {
30070 case DW_OP_regx:
30071 tail = new_loc_descr (DW_OP_bregx,
30072 l->dw_loc_oprnd1.v.val_unsigned, 0);
30073 break;
30074 case DW_OP_stack_value:
30075 break;
30076 default:
30077 return NULL;
30078 }
30079 }
30080 else
30081 tail = new_loc_descr (DW_OP_deref, 0, 0);
30082
30083 dw_loc_descr_ref ret = NULL, *p = &ret;
30084 while (expr != l)
30085 {
30086 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30087 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30088 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30089 p = &(*p)->dw_loc_next;
30090 expr = expr->dw_loc_next;
30091 }
30092 *p = tail;
30093 return ret;
30094 }
30095
30096 /* For DW_AT_string_length attribute with DW_OP_GNU_variable_value
30097 reference to a variable or argument, adjust it if needed and return:
30098 -1 if the DW_AT_string_length attribute and, if present, the
30099 DW_AT_{string_length_,}byte_size attribute should be removed,
30100 0 if the attribute should be kept, perhaps with minor modifications; no need to rescan,
30101 1 if the attribute has been successfully adjusted. */
30102
30103 static int
30104 optimize_string_length (dw_attr_node *a)
30105 {
30106 dw_loc_descr_ref l = AT_loc (a), lv;
30107 dw_die_ref die;
30108 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30109 {
30110 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30111 die = lookup_decl_die (decl);
30112 if (die)
30113 {
30114 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30115 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30116 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30117 }
30118 else
30119 return -1;
30120 }
30121 else
30122 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30123
30124 /* DWARF5 allows reference class, so we can then reference the DIE.
30125 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30126 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30127 {
30128 a->dw_attr_val.val_class = dw_val_class_die_ref;
30129 a->dw_attr_val.val_entry = NULL;
30130 a->dw_attr_val.v.val_die_ref.die = die;
30131 a->dw_attr_val.v.val_die_ref.external = 0;
30132 return 0;
30133 }
30134
30135 dw_attr_node *av = get_AT (die, DW_AT_location);
30136 dw_loc_list_ref d;
30137 bool non_dwarf_expr = false;
30138
30139 if (av == NULL)
30140 return dwarf_strict ? -1 : 0;
30141 switch (AT_class (av))
30142 {
30143 case dw_val_class_loc_list:
30144 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30145 if (d->expr && non_dwarf_expression (d->expr))
30146 non_dwarf_expr = true;
30147 break;
30148 case dw_val_class_view_list:
30149 gcc_unreachable ();
30150 case dw_val_class_loc:
30151 lv = AT_loc (av);
30152 if (lv == NULL)
30153 return dwarf_strict ? -1 : 0;
30154 if (non_dwarf_expression (lv))
30155 non_dwarf_expr = true;
30156 break;
30157 default:
30158 return dwarf_strict ? -1 : 0;
30159 }
30160
30161 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30162 into DW_OP_call4 or DW_OP_GNU_variable_value into
30163 DW_OP_call4 DW_OP_deref, do so. */
30164 if (!non_dwarf_expr
30165 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30166 {
30167 l->dw_loc_opc = DW_OP_call4;
30168 if (l->dw_loc_next)
30169 l->dw_loc_next = NULL;
30170 else
30171 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30172 return 0;
30173 }
30174
30175 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30176 copy over the DW_AT_location attribute from die to a. */
30177 if (l->dw_loc_next != NULL)
30178 {
30179 a->dw_attr_val = av->dw_attr_val;
30180 return 1;
30181 }
30182
30183 dw_loc_list_ref list, *p;
30184 switch (AT_class (av))
30185 {
30186 case dw_val_class_loc_list:
30187 p = &list;
30188 list = NULL;
30189 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30190 {
30191 lv = copy_deref_exprloc (d->expr);
30192 if (lv)
30193 {
30194 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30195 p = &(*p)->dw_loc_next;
30196 }
30197 else if (!dwarf_strict && d->expr)
30198 return 0;
30199 }
30200 if (list == NULL)
30201 return dwarf_strict ? -1 : 0;
30202 a->dw_attr_val.val_class = dw_val_class_loc_list;
30203 gen_llsym (list);
30204 *AT_loc_list_ptr (a) = list;
30205 return 1;
30206 case dw_val_class_loc:
30207 lv = copy_deref_exprloc (AT_loc (av));
30208 if (lv == NULL)
30209 return dwarf_strict ? -1 : 0;
30210 a->dw_attr_val.v.val_loc = lv;
30211 return 1;
30212 default:
30213 gcc_unreachable ();
30214 }
30215 }
30216
30217 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30218 an address in .rodata section if the string literal is emitted there,
30219 or remove the containing location list or replace DW_AT_const_value
30220 with DW_AT_location and empty location expression, if it isn't found
30221 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
30222 to something that has been emitted in the current CU. */
30223
30224 static void
30225 resolve_addr (dw_die_ref die)
30226 {
30227 dw_die_ref c;
30228 dw_attr_node *a;
30229 dw_loc_list_ref *curr, *start, loc;
30230 unsigned ix;
30231 bool remove_AT_byte_size = false;
30232
30233 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30234 switch (AT_class (a))
30235 {
30236 case dw_val_class_loc_list:
30237 start = curr = AT_loc_list_ptr (a);
30238 loc = *curr;
30239 gcc_assert (loc);
30240 /* The same list can be referenced more than once. See if we have
30241 already recorded the result from a previous pass. */
30242 if (loc->replaced)
30243 *curr = loc->dw_loc_next;
30244 else if (!loc->resolved_addr)
30245 {
30246 /* As things stand, we do not expect or allow one die to
30247 reference a suffix of another die's location list chain.
30248 References must be identical or completely separate.
30249 There is therefore no need to cache the result of this
30250 pass on any list other than the first; doing so
30251 would lead to unnecessary writes. */
30252 while (*curr)
30253 {
30254 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30255 if (!resolve_addr_in_expr (a, (*curr)->expr))
30256 {
30257 dw_loc_list_ref next = (*curr)->dw_loc_next;
30258 dw_loc_descr_ref l = (*curr)->expr;
30259
30260 if (next && (*curr)->ll_symbol)
30261 {
30262 gcc_assert (!next->ll_symbol);
30263 next->ll_symbol = (*curr)->ll_symbol;
30264 next->vl_symbol = (*curr)->vl_symbol;
30265 }
30266 if (dwarf_split_debug_info)
30267 remove_loc_list_addr_table_entries (l);
30268 *curr = next;
30269 }
30270 else
30271 {
30272 mark_base_types ((*curr)->expr);
30273 curr = &(*curr)->dw_loc_next;
30274 }
30275 }
30276 if (loc == *start)
30277 loc->resolved_addr = 1;
30278 else
30279 {
30280 loc->replaced = 1;
30281 loc->dw_loc_next = *start;
30282 }
30283 }
30284 if (!*start)
30285 {
30286 remove_AT (die, a->dw_attr);
30287 ix--;
30288 }
30289 break;
30290 case dw_val_class_view_list:
30291 {
30292 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30293 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30294 dw_val_node *llnode
30295 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30296 /* If we no longer have a loclist, or it no longer needs
30297 views, drop this attribute. */
30298 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30299 {
30300 remove_AT (die, a->dw_attr);
30301 ix--;
30302 }
30303 break;
30304 }
30305 case dw_val_class_loc:
30306 {
30307 dw_loc_descr_ref l = AT_loc (a);
30308 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30309 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30310 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30311 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30312 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30313 with DW_FORM_ref referencing the same DIE as
30314 DW_OP_GNU_variable_value used to reference. */
30315 if (a->dw_attr == DW_AT_string_length
30316 && l
30317 && l->dw_loc_opc == DW_OP_GNU_variable_value
30318 && (l->dw_loc_next == NULL
30319 || (l->dw_loc_next->dw_loc_next == NULL
30320 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30321 {
30322 switch (optimize_string_length (a))
30323 {
30324 case -1:
30325 remove_AT (die, a->dw_attr);
30326 ix--;
30327 /* If we drop DW_AT_string_length, we need to drop also
30328 DW_AT_{string_length_,}byte_size. */
30329 remove_AT_byte_size = true;
30330 continue;
30331 default:
30332 break;
30333 case 1:
30334 /* Even if we keep the optimized DW_AT_string_length,
30335 it might have changed AT_class, so process it again. */
30336 ix--;
30337 continue;
30338 }
30339 }
30340 /* For -gdwarf-2 don't attempt to optimize
30341 DW_AT_data_member_location containing
30342 DW_OP_plus_uconst - older consumers might
30343 rely on it being that op instead of a more complex,
30344 but shorter, location description. */
30345 if ((dwarf_version > 2
30346 || a->dw_attr != DW_AT_data_member_location
30347 || l == NULL
30348 || l->dw_loc_opc != DW_OP_plus_uconst
30349 || l->dw_loc_next != NULL)
30350 && !resolve_addr_in_expr (a, l))
30351 {
30352 if (dwarf_split_debug_info)
30353 remove_loc_list_addr_table_entries (l);
30354 if (l != NULL
30355 && l->dw_loc_next == NULL
30356 && l->dw_loc_opc == DW_OP_addr
30357 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30358 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30359 && a->dw_attr == DW_AT_location)
30360 {
30361 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30362 remove_AT (die, a->dw_attr);
30363 ix--;
30364 optimize_location_into_implicit_ptr (die, decl);
30365 break;
30366 }
30367 if (a->dw_attr == DW_AT_string_length)
30368 /* If we drop DW_AT_string_length, we need to drop also
30369 DW_AT_{string_length_,}byte_size. */
30370 remove_AT_byte_size = true;
30371 remove_AT (die, a->dw_attr);
30372 ix--;
30373 }
30374 else
30375 mark_base_types (l);
30376 }
30377 break;
30378 case dw_val_class_addr:
30379 if (a->dw_attr == DW_AT_const_value
30380 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30381 {
30382 if (AT_index (a) != NOT_INDEXED)
30383 remove_addr_table_entry (a->dw_attr_val.val_entry);
30384 remove_AT (die, a->dw_attr);
30385 ix--;
30386 }
30387 if ((die->die_tag == DW_TAG_call_site
30388 && a->dw_attr == DW_AT_call_origin)
30389 || (die->die_tag == DW_TAG_GNU_call_site
30390 && a->dw_attr == DW_AT_abstract_origin))
30391 {
30392 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30393 dw_die_ref tdie = lookup_decl_die (tdecl);
30394 dw_die_ref cdie;
30395 if (tdie == NULL
30396 && DECL_EXTERNAL (tdecl)
30397 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30398 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30399 {
30400 dw_die_ref pdie = cdie;
30401 /* Make sure we don't add these DIEs into type units.
30402 We could emit skeleton DIEs for context (namespaces,
30403 outer structs/classes) and a skeleton DIE for the
30404 innermost context with DW_AT_signature pointing to the
30405 type unit. See PR78835. */
30406 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30407 pdie = pdie->die_parent;
30408 if (pdie == NULL)
30409 {
30410 /* Creating a full DIE for tdecl is overly expensive and,
30411 when in the LTO phase, even wrong at this point,
30412 as it can end up generating new type DIEs we didn't
30413 output, and thus optimize_external_refs would crash.
30414 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30415 add_AT_flag (tdie, DW_AT_external, 1);
30416 add_AT_flag (tdie, DW_AT_declaration, 1);
30417 add_linkage_attr (tdie, tdecl);
30418 add_name_and_src_coords_attributes (tdie, tdecl, true);
30419 equate_decl_number_to_die (tdecl, tdie);
30420 }
30421 }
30422 if (tdie)
30423 {
30424 a->dw_attr_val.val_class = dw_val_class_die_ref;
30425 a->dw_attr_val.v.val_die_ref.die = tdie;
30426 a->dw_attr_val.v.val_die_ref.external = 0;
30427 }
30428 else
30429 {
30430 if (AT_index (a) != NOT_INDEXED)
30431 remove_addr_table_entry (a->dw_attr_val.val_entry);
30432 remove_AT (die, a->dw_attr);
30433 ix--;
30434 }
30435 }
30436 break;
30437 default:
30438 break;
30439 }
30440
30441 if (remove_AT_byte_size)
30442 remove_AT (die, dwarf_version >= 5
30443 ? DW_AT_string_length_byte_size
30444 : DW_AT_byte_size);
30445
30446 FOR_EACH_CHILD (die, c, resolve_addr (c));
30447 }
30448 \f
30449 /* Helper routines for optimize_location_lists.
30450 This pass tries to share identical location lists in the .debug_loc
30451 section. */
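/* Each location list is hashed over its bounds, views and expressions
   (hash_loc_list) and entered into a hash table; lists that hash and
   compare equal via loc_list_hasher are shared rather than emitted more
   than once.  */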
30452
30453 /* Iteratively hash operands of LOC opcode into HSTATE. */
30454
30455 static void
30456 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30457 {
30458 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30459 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30460
30461 switch (loc->dw_loc_opc)
30462 {
30463 case DW_OP_const4u:
30464 case DW_OP_const8u:
30465 if (loc->dtprel)
30466 goto hash_addr;
30467 /* FALLTHRU */
30468 case DW_OP_const1u:
30469 case DW_OP_const1s:
30470 case DW_OP_const2u:
30471 case DW_OP_const2s:
30472 case DW_OP_const4s:
30473 case DW_OP_const8s:
30474 case DW_OP_constu:
30475 case DW_OP_consts:
30476 case DW_OP_pick:
30477 case DW_OP_plus_uconst:
30478 case DW_OP_breg0:
30479 case DW_OP_breg1:
30480 case DW_OP_breg2:
30481 case DW_OP_breg3:
30482 case DW_OP_breg4:
30483 case DW_OP_breg5:
30484 case DW_OP_breg6:
30485 case DW_OP_breg7:
30486 case DW_OP_breg8:
30487 case DW_OP_breg9:
30488 case DW_OP_breg10:
30489 case DW_OP_breg11:
30490 case DW_OP_breg12:
30491 case DW_OP_breg13:
30492 case DW_OP_breg14:
30493 case DW_OP_breg15:
30494 case DW_OP_breg16:
30495 case DW_OP_breg17:
30496 case DW_OP_breg18:
30497 case DW_OP_breg19:
30498 case DW_OP_breg20:
30499 case DW_OP_breg21:
30500 case DW_OP_breg22:
30501 case DW_OP_breg23:
30502 case DW_OP_breg24:
30503 case DW_OP_breg25:
30504 case DW_OP_breg26:
30505 case DW_OP_breg27:
30506 case DW_OP_breg28:
30507 case DW_OP_breg29:
30508 case DW_OP_breg30:
30509 case DW_OP_breg31:
30510 case DW_OP_regx:
30511 case DW_OP_fbreg:
30512 case DW_OP_piece:
30513 case DW_OP_deref_size:
30514 case DW_OP_xderef_size:
30515 hstate.add_object (val1->v.val_int);
30516 break;
30517 case DW_OP_skip:
30518 case DW_OP_bra:
30519 {
30520 int offset;
30521
30522 gcc_assert (val1->val_class == dw_val_class_loc);
30523 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30524 hstate.add_object (offset);
30525 }
30526 break;
30527 case DW_OP_implicit_value:
30528 hstate.add_object (val1->v.val_unsigned);
30529 switch (val2->val_class)
30530 {
30531 case dw_val_class_const:
30532 hstate.add_object (val2->v.val_int);
30533 break;
30534 case dw_val_class_vec:
30535 {
30536 unsigned int elt_size = val2->v.val_vec.elt_size;
30537 unsigned int len = val2->v.val_vec.length;
30538
30539 hstate.add_int (elt_size);
30540 hstate.add_int (len);
30541 hstate.add (val2->v.val_vec.array, len * elt_size);
30542 }
30543 break;
30544 case dw_val_class_const_double:
30545 hstate.add_object (val2->v.val_double.low);
30546 hstate.add_object (val2->v.val_double.high);
30547 break;
30548 case dw_val_class_wide_int:
30549 hstate.add (val2->v.val_wide->get_val (),
30550 get_full_len (*val2->v.val_wide)
30551 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30552 break;
30553 case dw_val_class_addr:
30554 inchash::add_rtx (val2->v.val_addr, hstate);
30555 break;
30556 default:
30557 gcc_unreachable ();
30558 }
30559 break;
30560 case DW_OP_bregx:
30561 case DW_OP_bit_piece:
30562 hstate.add_object (val1->v.val_int);
30563 hstate.add_object (val2->v.val_int);
30564 break;
30565 case DW_OP_addr:
30566 hash_addr:
30567 if (loc->dtprel)
30568 {
30569 unsigned char dtprel = 0xd1;
30570 hstate.add_object (dtprel);
30571 }
30572 inchash::add_rtx (val1->v.val_addr, hstate);
30573 break;
30574 case DW_OP_GNU_addr_index:
30575 case DW_OP_addrx:
30576 case DW_OP_GNU_const_index:
30577 case DW_OP_constx:
30578 {
30579 if (loc->dtprel)
30580 {
30581 unsigned char dtprel = 0xd1;
30582 hstate.add_object (dtprel);
30583 }
30584 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30585 }
30586 break;
30587 case DW_OP_implicit_pointer:
30588 case DW_OP_GNU_implicit_pointer:
30589 hstate.add_int (val2->v.val_int);
30590 break;
30591 case DW_OP_entry_value:
30592 case DW_OP_GNU_entry_value:
30593 hstate.add_object (val1->v.val_loc);
30594 break;
30595 case DW_OP_regval_type:
30596 case DW_OP_deref_type:
30597 case DW_OP_GNU_regval_type:
30598 case DW_OP_GNU_deref_type:
30599 {
30600 unsigned int byte_size
30601 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30602 unsigned int encoding
30603 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30604 hstate.add_object (val1->v.val_int);
30605 hstate.add_object (byte_size);
30606 hstate.add_object (encoding);
30607 }
30608 break;
30609 case DW_OP_convert:
30610 case DW_OP_reinterpret:
30611 case DW_OP_GNU_convert:
30612 case DW_OP_GNU_reinterpret:
30613 if (val1->val_class == dw_val_class_unsigned_const)
30614 {
30615 hstate.add_object (val1->v.val_unsigned);
30616 break;
30617 }
30618 /* FALLTHRU */
30619 case DW_OP_const_type:
30620 case DW_OP_GNU_const_type:
30621 {
30622 unsigned int byte_size
30623 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30624 unsigned int encoding
30625 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30626 hstate.add_object (byte_size);
30627 hstate.add_object (encoding);
30628 if (loc->dw_loc_opc != DW_OP_const_type
30629 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30630 break;
30631 hstate.add_object (val2->val_class);
30632 switch (val2->val_class)
30633 {
30634 case dw_val_class_const:
30635 hstate.add_object (val2->v.val_int);
30636 break;
30637 case dw_val_class_vec:
30638 {
30639 unsigned int elt_size = val2->v.val_vec.elt_size;
30640 unsigned int len = val2->v.val_vec.length;
30641
30642 hstate.add_object (elt_size);
30643 hstate.add_object (len);
30644 hstate.add (val2->v.val_vec.array, len * elt_size);
30645 }
30646 break;
30647 case dw_val_class_const_double:
30648 hstate.add_object (val2->v.val_double.low);
30649 hstate.add_object (val2->v.val_double.high);
30650 break;
30651 case dw_val_class_wide_int:
30652 hstate.add (val2->v.val_wide->get_val (),
30653 get_full_len (*val2->v.val_wide)
30654 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30655 break;
30656 default:
30657 gcc_unreachable ();
30658 }
30659 }
30660 break;
30661
30662 default:
30663 /* Other codes have no operands. */
30664 break;
30665 }
30666 }
30667
30668 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30669
30670 static inline void
30671 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30672 {
30673 dw_loc_descr_ref l;
30674 bool sizes_computed = false;
30675 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30676 size_of_locs (loc);
30677
30678 for (l = loc; l != NULL; l = l->dw_loc_next)
30679 {
30680 enum dwarf_location_atom opc = l->dw_loc_opc;
30681 hstate.add_object (opc);
30682 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30683 {
30684 size_of_locs (loc);
30685 sizes_computed = true;
30686 }
30687 hash_loc_operands (l, hstate);
30688 }
30689 }
30690
30691 /* Compute hash of the whole location list LIST_HEAD. */
30692
30693 static inline void
30694 hash_loc_list (dw_loc_list_ref list_head)
30695 {
30696 dw_loc_list_ref curr = list_head;
30697 inchash::hash hstate;
30698
30699 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30700 {
30701 hstate.add (curr->begin, strlen (curr->begin) + 1);
30702 hstate.add (curr->end, strlen (curr->end) + 1);
30703 hstate.add_object (curr->vbegin);
30704 hstate.add_object (curr->vend);
30705 if (curr->section)
30706 hstate.add (curr->section, strlen (curr->section) + 1);
30707 hash_locs (curr->expr, hstate);
30708 }
30709 list_head->hash = hstate.end ();
30710 }
30711
30712 /* Return true if X and Y opcodes have the same operands. */
30713
30714 static inline bool
30715 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30716 {
30717 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30718 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30719 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30720 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30721
30722 switch (x->dw_loc_opc)
30723 {
30724 case DW_OP_const4u:
30725 case DW_OP_const8u:
30726 if (x->dtprel)
30727 goto hash_addr;
30728 /* FALLTHRU */
30729 case DW_OP_const1u:
30730 case DW_OP_const1s:
30731 case DW_OP_const2u:
30732 case DW_OP_const2s:
30733 case DW_OP_const4s:
30734 case DW_OP_const8s:
30735 case DW_OP_constu:
30736 case DW_OP_consts:
30737 case DW_OP_pick:
30738 case DW_OP_plus_uconst:
30739 case DW_OP_breg0:
30740 case DW_OP_breg1:
30741 case DW_OP_breg2:
30742 case DW_OP_breg3:
30743 case DW_OP_breg4:
30744 case DW_OP_breg5:
30745 case DW_OP_breg6:
30746 case DW_OP_breg7:
30747 case DW_OP_breg8:
30748 case DW_OP_breg9:
30749 case DW_OP_breg10:
30750 case DW_OP_breg11:
30751 case DW_OP_breg12:
30752 case DW_OP_breg13:
30753 case DW_OP_breg14:
30754 case DW_OP_breg15:
30755 case DW_OP_breg16:
30756 case DW_OP_breg17:
30757 case DW_OP_breg18:
30758 case DW_OP_breg19:
30759 case DW_OP_breg20:
30760 case DW_OP_breg21:
30761 case DW_OP_breg22:
30762 case DW_OP_breg23:
30763 case DW_OP_breg24:
30764 case DW_OP_breg25:
30765 case DW_OP_breg26:
30766 case DW_OP_breg27:
30767 case DW_OP_breg28:
30768 case DW_OP_breg29:
30769 case DW_OP_breg30:
30770 case DW_OP_breg31:
30771 case DW_OP_regx:
30772 case DW_OP_fbreg:
30773 case DW_OP_piece:
30774 case DW_OP_deref_size:
30775 case DW_OP_xderef_size:
30776 return valx1->v.val_int == valy1->v.val_int;
30777 case DW_OP_skip:
30778 case DW_OP_bra:
30779 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30780 can cause irrelevant differences in dw_loc_addr. */
30781 gcc_assert (valx1->val_class == dw_val_class_loc
30782 && valy1->val_class == dw_val_class_loc
30783 && (dwarf_split_debug_info
30784 || x->dw_loc_addr == y->dw_loc_addr));
30785 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30786 case DW_OP_implicit_value:
30787 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30788 || valx2->val_class != valy2->val_class)
30789 return false;
30790 switch (valx2->val_class)
30791 {
30792 case dw_val_class_const:
30793 return valx2->v.val_int == valy2->v.val_int;
30794 case dw_val_class_vec:
30795 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30796 && valx2->v.val_vec.length == valy2->v.val_vec.length
30797 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30798 valx2->v.val_vec.elt_size
30799 * valx2->v.val_vec.length) == 0;
30800 case dw_val_class_const_double:
30801 return valx2->v.val_double.low == valy2->v.val_double.low
30802 && valx2->v.val_double.high == valy2->v.val_double.high;
30803 case dw_val_class_wide_int:
30804 return *valx2->v.val_wide == *valy2->v.val_wide;
30805 case dw_val_class_addr:
30806 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30807 default:
30808 gcc_unreachable ();
30809 }
30810 case DW_OP_bregx:
30811 case DW_OP_bit_piece:
30812 return valx1->v.val_int == valy1->v.val_int
30813 && valx2->v.val_int == valy2->v.val_int;
30814 case DW_OP_addr:
30815 hash_addr:
30816 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30817 case DW_OP_GNU_addr_index:
30818 case DW_OP_addrx:
30819 case DW_OP_GNU_const_index:
30820 case DW_OP_constx:
30821 {
30822 rtx ax1 = valx1->val_entry->addr.rtl;
30823 rtx ay1 = valy1->val_entry->addr.rtl;
30824 return rtx_equal_p (ax1, ay1);
30825 }
30826 case DW_OP_implicit_pointer:
30827 case DW_OP_GNU_implicit_pointer:
30828 return valx1->val_class == dw_val_class_die_ref
30829 && valx1->val_class == valy1->val_class
30830 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30831 && valx2->v.val_int == valy2->v.val_int;
30832 case DW_OP_entry_value:
30833 case DW_OP_GNU_entry_value:
30834 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30835 case DW_OP_const_type:
30836 case DW_OP_GNU_const_type:
30837 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30838 || valx2->val_class != valy2->val_class)
30839 return false;
30840 switch (valx2->val_class)
30841 {
30842 case dw_val_class_const:
30843 return valx2->v.val_int == valy2->v.val_int;
30844 case dw_val_class_vec:
30845 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30846 && valx2->v.val_vec.length == valy2->v.val_vec.length
30847 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30848 valx2->v.val_vec.elt_size
30849 * valx2->v.val_vec.length) == 0;
30850 case dw_val_class_const_double:
30851 return valx2->v.val_double.low == valy2->v.val_double.low
30852 && valx2->v.val_double.high == valy2->v.val_double.high;
30853 case dw_val_class_wide_int:
30854 return *valx2->v.val_wide == *valy2->v.val_wide;
30855 default:
30856 gcc_unreachable ();
30857 }
30858 case DW_OP_regval_type:
30859 case DW_OP_deref_type:
30860 case DW_OP_GNU_regval_type:
30861 case DW_OP_GNU_deref_type:
30862 return valx1->v.val_int == valy1->v.val_int
30863 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30864 case DW_OP_convert:
30865 case DW_OP_reinterpret:
30866 case DW_OP_GNU_convert:
30867 case DW_OP_GNU_reinterpret:
30868 if (valx1->val_class != valy1->val_class)
30869 return false;
30870 if (valx1->val_class == dw_val_class_unsigned_const)
30871 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30872 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30873 case DW_OP_GNU_parameter_ref:
30874 return valx1->val_class == dw_val_class_die_ref
30875 && valx1->val_class == valy1->val_class
30876 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30877 default:
30878 /* Other codes have no operands. */
30879 return true;
30880 }
30881 }
30882
30883 /* Return true if DWARF location expressions X and Y are the same. */
30884
30885 static inline bool
30886 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30887 {
30888 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30889 if (x->dw_loc_opc != y->dw_loc_opc
30890 || x->dtprel != y->dtprel
30891 || !compare_loc_operands (x, y))
30892 break;
30893 return x == NULL && y == NULL;
30894 }
30895
30896 /* Hashtable helpers. */
30897
30898 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30899 {
30900 static inline hashval_t hash (const dw_loc_list_struct *);
30901 static inline bool equal (const dw_loc_list_struct *,
30902 const dw_loc_list_struct *);
30903 };
30904
30905 /* Return precomputed hash of location list X. */
30906
30907 inline hashval_t
30908 loc_list_hasher::hash (const dw_loc_list_struct *x)
30909 {
30910 return x->hash;
30911 }
30912
30913 /* Return true if location lists A and B are the same. */
30914
30915 inline bool
30916 loc_list_hasher::equal (const dw_loc_list_struct *a,
30917 const dw_loc_list_struct *b)
30918 {
30919 if (a == b)
30920     return true;
30921   if (a->hash != b->hash)
30922     return false;
30923 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30924 if (strcmp (a->begin, b->begin) != 0
30925 || strcmp (a->end, b->end) != 0
30926 || (a->section == NULL) != (b->section == NULL)
30927 || (a->section && strcmp (a->section, b->section) != 0)
30928 || a->vbegin != b->vbegin || a->vend != b->vend
30929 || !compare_locs (a->expr, b->expr))
30930 break;
30931 return a == NULL && b == NULL;
30932 }
30933
30934 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30935
30936
30937 /* Recursively optimize location lists referenced from DIE
30938 children and share them whenever possible. */
30939
30940 static void
30941 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30942 {
30943 dw_die_ref c;
30944 dw_attr_node *a;
30945 unsigned ix;
30946 dw_loc_list_struct **slot;
30947 bool drop_locviews = false;
30948 bool has_locviews = false;
30949
30950 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30951 if (AT_class (a) == dw_val_class_loc_list)
30952 {
30953 dw_loc_list_ref list = AT_loc_list (a);
30954 /* TODO: perform some optimizations here, before hashing
30955 it and storing into the hash table. */
30956 hash_loc_list (list);
30957 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30958 if (*slot == NULL)
30959 {
30960 *slot = list;
30961 if (loc_list_has_views (list))
30962 gcc_assert (list->vl_symbol);
30963 else if (list->vl_symbol)
30964 {
30965 drop_locviews = true;
30966 list->vl_symbol = NULL;
30967 }
30968 }
30969 else
30970 {
30971 if (list->vl_symbol && !(*slot)->vl_symbol)
30972 drop_locviews = true;
30973 a->dw_attr_val.v.val_loc_list = *slot;
30974 }
30975 }
30976 else if (AT_class (a) == dw_val_class_view_list)
30977 {
30978 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30979 has_locviews = true;
30980 }
30981
30982
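  /* If any location list attribute lost its view list above (it had a view
     symbol but no actual views, or it was replaced by a shared copy without
     one), the DW_AT_GNU_locviews attribute on this DIE is stale; drop it.  */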
30983 if (drop_locviews && has_locviews)
30984 remove_AT (die, DW_AT_GNU_locviews);
30985
30986 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30987 }
30988
30989
30990 /* Recursively assign each location list a unique index into the debug_addr
30991 section. */
30992
30993 static void
30994 index_location_lists (dw_die_ref die)
30995 {
30996 dw_die_ref c;
30997 dw_attr_node *a;
30998 unsigned ix;
30999
31000 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31001 if (AT_class (a) == dw_val_class_loc_list)
31002 {
31003 dw_loc_list_ref list = AT_loc_list (a);
31004 dw_loc_list_ref curr;
31005 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31006 {
31007            /* Don't index an entry that has already been indexed
31008               or won't be output.  Make sure skip_loc_list_entry doesn't
31009               call size_of_locs, because that might cause a circular dependency:
31010               index_location_lists would then require the address table indexes
31011               to be computed, but it is itself still adding new entries through
31012               add_addr_table_entry, and the address table index computation
31013               requires that no new entries be added to the hash table.  In the
31014               rare case of a DWARF[234] location expression >= 64KB, we'll just
31015               waste an unused address table entry for it.  */
31016 if (curr->begin_entry != NULL
31017 || skip_loc_list_entry (curr))
31018 continue;
31019
31020 curr->begin_entry
31021 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31022 }
31023 }
31024
31025 FOR_EACH_CHILD (die, c, index_location_lists (c));
31026 }
31027
31028 /* Optimize location lists referenced from DIE
31029 children and share them whenever possible. */
31030
31031 static void
31032 optimize_location_lists (dw_die_ref die)
31033 {
31034 loc_list_hash_type htab (500);
31035 optimize_location_lists_1 (die, &htab);
31036 }
31037 \f
31038 /* Traverse the limbo die list, and add parent/child links. The only
31039 dies without parents that should be here are concrete instances of
31040 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31041 For concrete instances, we can get the parent die from the abstract
31042 instance. */
31043
31044 static void
31045 flush_limbo_die_list (void)
31046 {
31047 limbo_die_node *node;
31048
31049 /* get_context_die calls force_decl_die, which can put new DIEs on the
31050 limbo list in LTO mode when nested functions are put in a different
31051 partition than that of their parent function. */
31052 while ((node = limbo_die_list))
31053 {
31054 dw_die_ref die = node->die;
31055 limbo_die_list = node->next;
31056
31057 if (die->die_parent == NULL)
31058 {
31059 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31060
31061 if (origin && origin->die_parent)
31062 add_child_die (origin->die_parent, die);
31063 else if (is_cu_die (die))
31064 ;
31065 else if (seen_error ())
31066 /* It's OK to be confused by errors in the input. */
31067 add_child_die (comp_unit_die (), die);
31068 else
31069 {
31070 /* In certain situations, the lexical block containing a
31071 nested function can be optimized away, which results
31072 in the nested function die being orphaned. Likewise
31073 with the return type of that nested function. Force
31074 this to be a child of the containing function.
31075
31076 It may happen that even the containing function got fully
31077 inlined and optimized out. In that case we are lost and
31078                assign the empty child.  This should not be a big issue, as
31079 the function is likely unreachable too. */
31080 gcc_assert (node->created_for);
31081
31082 if (DECL_P (node->created_for))
31083 origin = get_context_die (DECL_CONTEXT (node->created_for));
31084 else if (TYPE_P (node->created_for))
31085 origin = scope_die_for (node->created_for, comp_unit_die ());
31086 else
31087 origin = comp_unit_die ();
31088
31089 add_child_die (origin, die);
31090 }
31091 }
31092 }
31093 }
31094
31095 /* Reset DIEs so we can output them again. */
31096
31097 static void
31098 reset_dies (dw_die_ref die)
31099 {
31100 dw_die_ref c;
31101
31102 /* Remove stuff we re-generate. */
31103 die->die_mark = 0;
31104 die->die_offset = 0;
31105 die->die_abbrev = 0;
31106 remove_AT (die, DW_AT_sibling);
31107
31108 FOR_EACH_CHILD (die, c, reset_dies (c));
31109 }
31110
31111 /* Output stuff that dwarf requires at the end of every file,
31112 and generate the DWARF-2 debugging info. */
31113
31114 static void
31115 dwarf2out_finish (const char *)
31116 {
31117 comdat_type_node *ctnode;
31118 dw_die_ref main_comp_unit_die;
31119 unsigned char checksum[16];
31120 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31121
31122 /* Flush out any latecomers to the limbo party. */
31123 flush_limbo_die_list ();
31124
31125 if (inline_entry_data_table)
31126 gcc_assert (inline_entry_data_table->elements () == 0);
31127
31128 if (flag_checking)
31129 {
31130 verify_die (comp_unit_die ());
31131 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31132 verify_die (node->die);
31133 }
31134
31135 /* We shouldn't have any symbols with delayed asm names for
31136 DIEs generated after early finish. */
31137 gcc_assert (deferred_asm_name == NULL);
31138
31139 gen_remaining_tmpl_value_param_die_attribute ();
31140
31141 if (flag_generate_lto || flag_generate_offload)
31142 {
31143 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31144
31145 /* Prune stuff so that dwarf2out_finish runs successfully
31146 for the fat part of the object. */
31147 reset_dies (comp_unit_die ());
31148 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31149 reset_dies (node->die);
31150
31151 hash_table<comdat_type_hasher> comdat_type_table (100);
31152 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31153 {
31154 comdat_type_node **slot
31155 = comdat_type_table.find_slot (ctnode, INSERT);
31156
31157 /* Don't reset types twice. */
31158 if (*slot != HTAB_EMPTY_ENTRY)
31159 continue;
31160
31161         /* Reset the comdat type unit DIEs so that they can be
31162            output again, this time for the fat part of the
31163            object.  */
31164 if (debug_info_level >= DINFO_LEVEL_TERSE)
31165 reset_dies (ctnode->root_die);
31166
31167 *slot = ctnode;
31168 }
31169
31170 /* Reset die CU symbol so we don't output it twice. */
31171 comp_unit_die ()->die_id.die_symbol = NULL;
31172
31173 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31174 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31175 if (have_macinfo)
31176 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31177
31178 /* Remove indirect string decisions. */
31179 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31180 }
31181
31182 #if ENABLE_ASSERT_CHECKING
31183 {
31184 dw_die_ref die = comp_unit_die (), c;
31185 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31186 }
31187 #endif
31188 resolve_addr (comp_unit_die ());
31189 move_marked_base_types ();
31190
31191 /* Initialize sections and labels used for actual assembler output. */
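  /* The returned generation number is used when emitting DWARF 5 range
     lists (output_rnglists) below.  */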
31192 unsigned generation = init_sections_and_labels (false);
31193
31194   /* Traverse the DIEs and add sibling attributes to those DIEs that
31195      have children.  */
31196 add_sibling_attributes (comp_unit_die ());
31197 limbo_die_node *node;
31198 for (node = cu_die_list; node; node = node->next)
31199 add_sibling_attributes (node->die);
31200 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31201 add_sibling_attributes (ctnode->root_die);
31202
31203 /* When splitting DWARF info, we put some attributes in the
31204 skeleton compile_unit DIE that remains in the .o, while
31205 most attributes go in the DWO compile_unit_die. */
31206 if (dwarf_split_debug_info)
31207 {
31208 limbo_die_node *cu;
31209 main_comp_unit_die = gen_compile_unit_die (NULL);
31210 if (dwarf_version >= 5)
31211 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
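      /* gen_compile_unit_die put the new skeleton DIE at the head of the
	 limbo list; move it to the CU list instead.  */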
31212 cu = limbo_die_list;
31213 gcc_assert (cu->die == main_comp_unit_die);
31214 limbo_die_list = limbo_die_list->next;
31215 cu->next = cu_die_list;
31216 cu_die_list = cu;
31217 }
31218 else
31219 main_comp_unit_die = comp_unit_die ();
31220
31221 /* Output a terminator label for the .text section. */
31222 switch_to_section (text_section);
31223 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31224 if (cold_text_section)
31225 {
31226 switch_to_section (cold_text_section);
31227 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31228 }
31229
31230 /* We can only use the low/high_pc attributes if all of the code was
31231 in .text. */
31232 if (!have_multiple_function_sections
31233 || (dwarf_version < 3 && dwarf_strict))
31234 {
31235 /* Don't add if the CU has no associated code. */
31236 if (text_section_used)
31237 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31238 text_end_label, true);
31239 }
31240 else
31241 {
31242 unsigned fde_idx;
31243 dw_fde_ref fde;
31244 bool range_list_added = false;
31245
31246 if (text_section_used)
31247 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31248 text_end_label, &range_list_added, true);
31249 if (cold_text_section_used)
31250 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31251 cold_end_label, &range_list_added, true);
31252
31253 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31254 {
31255 if (DECL_IGNORED_P (fde->decl))
31256 continue;
31257 if (!fde->in_std_section)
31258 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31259 fde->dw_fde_end, &range_list_added,
31260 true);
31261 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31262 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31263 fde->dw_fde_second_end, &range_list_added,
31264 true);
31265 }
31266
31267 if (range_list_added)
31268 {
31269 /* We need to give .debug_loc and .debug_ranges an appropriate
31270 "base address". Use zero so that these addresses become
31271 absolute. Historically, we've emitted the unexpected
31272 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31273 Emit both to give time for other tools to adapt. */
31274 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31275 if (! dwarf_strict && dwarf_version < 4)
31276 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31277
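	  /* Terminate the current range list.  */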
31278 add_ranges (NULL);
31279 }
31280 }
31281
31282 /* AIX Assembler inserts the length, so adjust the reference to match the
31283 offset expected by debuggers. */
31284 strcpy (dl_section_ref, debug_line_section_label);
31285 if (XCOFF_DEBUGGING_INFO)
31286 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31287
31288 if (debug_info_level >= DINFO_LEVEL_TERSE)
31289 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31290 dl_section_ref);
31291
31292 if (have_macinfo)
31293 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31294 macinfo_section_label);
31295
31296 if (dwarf_split_debug_info)
31297 {
31298 if (have_location_lists)
31299 {
31300 if (dwarf_version >= 5)
31301 add_AT_loclistsptr (comp_unit_die (), DW_AT_loclists_base,
31302 loc_section_label);
31303 /* optimize_location_lists calculates the size of the lists,
31304 so index them first, and assign indices to the entries.
31305 Although optimize_location_lists will remove entries from
31306 the table, it only does so for duplicates, and therefore
31307 only reduces ref_counts to 1. */
31308 index_location_lists (comp_unit_die ());
31309 }
31310
31311 if (addr_index_table != NULL)
31312 {
31313 unsigned int index = 0;
31314 addr_index_table
31315 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31316 (&index);
31317 }
31318 }
31319
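  /* Reset the running location list index; it is reused both when assigning
     indexes for -gsplit-dwarf below and when emitting the offset entry count
     for the DWARF 5 location lists header later.  */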
31320 loc_list_idx = 0;
31321 if (have_location_lists)
31322 {
31323 optimize_location_lists (comp_unit_die ());
31324 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31325 if (dwarf_version >= 5 && dwarf_split_debug_info)
31326 assign_location_list_indexes (comp_unit_die ());
31327 }
31328
31329 save_macinfo_strings ();
31330
31331 if (dwarf_split_debug_info)
31332 {
31333 unsigned int index = 0;
31334
31335 /* Add attributes common to skeleton compile_units and
31336 type_units. Because these attributes include strings, it
31337 must be done before freezing the string table. Top-level
31338 skeleton die attrs are added when the skeleton type unit is
31339 created, so ensure it is created by this point. */
31340 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31341 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31342 }
31343
31344 /* Output all of the compilation units. We put the main one last so that
31345 the offsets are available to output_pubnames. */
31346 for (node = cu_die_list; node; node = node->next)
31347 output_comp_unit (node->die, 0, NULL);
31348
31349 hash_table<comdat_type_hasher> comdat_type_table (100);
31350 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31351 {
31352 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31353
31354 /* Don't output duplicate types. */
31355 if (*slot != HTAB_EMPTY_ENTRY)
31356 continue;
31357
31358 /* Add a pointer to the line table for the main compilation unit
31359 so that the debugger can make sense of DW_AT_decl_file
31360 attributes. */
31361 if (debug_info_level >= DINFO_LEVEL_TERSE)
31362 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31363 (!dwarf_split_debug_info
31364 ? dl_section_ref
31365 : debug_skeleton_line_section_label));
31366
31367 output_comdat_type_unit (ctnode);
31368 *slot = ctnode;
31369 }
31370
31371 if (dwarf_split_debug_info)
31372 {
31373 int mark;
31374 struct md5_ctx ctx;
31375
31376 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31377 index_rnglists ();
31378
31379 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31380 md5_init_ctx (&ctx);
31381 mark = 0;
31382 die_checksum (comp_unit_die (), &ctx, &mark);
31383 unmark_all_dies (comp_unit_die ());
31384 md5_finish_ctx (&ctx, checksum);
31385
31386 if (dwarf_version < 5)
31387 {
31388 /* Use the first 8 bytes of the checksum as the dwo_id,
31389 and add it to both comp-unit DIEs. */
31390 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31391 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31392 }
31393
31394 /* Add the base offset of the ranges table to the skeleton
31395 comp-unit DIE. */
31396 if (!vec_safe_is_empty (ranges_table))
31397 {
31398 if (dwarf_version >= 5)
31399 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31400 ranges_base_label);
31401 else
31402 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31403 ranges_section_label);
31404 }
31405
31406 switch_to_section (debug_addr_section);
31407       /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31408          which GCC uses to implement -gsplit-dwarf as a GNU DWARF extension
31409          before DWARF5, didn't have a header for .debug_addr units.
31410          DWARF5 specifies a small header when address tables are used.  */
31411 if (dwarf_version >= 5)
31412 {
31413 unsigned int last_idx = 0;
31414 unsigned long addrs_length;
31415
31416 addr_index_table->traverse_noresize
31417 <unsigned int *, count_index_addrs> (&last_idx);
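	  /* The unit length excludes the length field itself but covers the
	     2-byte version, 1-byte address size and 1-byte segment selector
	     size that follow it, hence the extra 4 bytes.  */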
31418 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31419
31420 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31421 dw2_asm_output_data (4, 0xffffffff,
31422 "Escape value for 64-bit DWARF extension");
31423 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31424 "Length of Address Unit");
31425 dw2_asm_output_data (2, 5, "DWARF addr version");
31426 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31427 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31428 }
31429 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31430 output_addr_table ();
31431 }
31432
31433 /* Output the main compilation unit if non-empty or if .debug_macinfo
31434 or .debug_macro will be emitted. */
31435 output_comp_unit (comp_unit_die (), have_macinfo,
31436 dwarf_split_debug_info ? checksum : NULL);
31437
31438 if (dwarf_split_debug_info && info_section_emitted)
31439 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31440
31441 /* Output the abbreviation table. */
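  /* The zero-th entry of abbrev_die_table is allocated but unused, so a
     length of 1 means there are no abbreviations to output.  */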
31442 if (vec_safe_length (abbrev_die_table) != 1)
31443 {
31444 switch_to_section (debug_abbrev_section);
31445 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31446 output_abbrev_section ();
31447 }
31448
31449 /* Output location list section if necessary. */
31450 if (have_location_lists)
31451 {
31452 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31453 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31454 /* Output the location lists info. */
31455 switch_to_section (debug_loc_section);
31456 if (dwarf_version >= 5)
31457 {
31458 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
31459 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
31460 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31461 dw2_asm_output_data (4, 0xffffffff,
31462 "Initial length escape value indicating "
31463 "64-bit DWARF extension");
31464 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31465 "Length of Location Lists");
31466 ASM_OUTPUT_LABEL (asm_out_file, l1);
31467 output_dwarf_version ();
31468 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31469 dw2_asm_output_data (1, 0, "Segment Size");
31470 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31471 "Offset Entry Count");
31472 }
31473 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31474 if (dwarf_version >= 5 && dwarf_split_debug_info)
31475 {
31476 unsigned int save_loc_list_idx = loc_list_idx;
31477 loc_list_idx = 0;
31478 output_loclists_offsets (comp_unit_die ());
31479 gcc_assert (save_loc_list_idx == loc_list_idx);
31480 }
31481 output_location_lists (comp_unit_die ());
31482 if (dwarf_version >= 5)
31483 ASM_OUTPUT_LABEL (asm_out_file, l2);
31484 }
31485
31486 output_pubtables ();
31487
31488 /* Output the address range information if a CU (.debug_info section)
31489 was emitted. We output an empty table even if we had no functions
31490      to put in it.  This is because the consumer has no way to tell the
31491 difference between an empty table that we omitted and failure to
31492 generate a table that would have contained data. */
31493 if (info_section_emitted)
31494 {
31495 switch_to_section (debug_aranges_section);
31496 output_aranges ();
31497 }
31498
31499 /* Output ranges section if necessary. */
31500 if (!vec_safe_is_empty (ranges_table))
31501 {
31502 if (dwarf_version >= 5)
31503 output_rnglists (generation);
31504 else
31505 output_ranges ();
31506 }
31507
31508 /* Have to end the macro section. */
31509 if (have_macinfo)
31510 {
31511 switch_to_section (debug_macinfo_section);
31512 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31513 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31514 : debug_skeleton_line_section_label, false);
31515 dw2_asm_output_data (1, 0, "End compilation unit");
31516 }
31517
31518 /* Output the source line correspondence table. We must do this
31519 even if there is no line information. Otherwise, on an empty
31520 translation unit, we will generate a present, but empty,
31521 .debug_info section. IRIX 6.5 `nm' will then complain when
31522 examining the file. This is done late so that any filenames
31523 used by the debug_info section are marked as 'used'. */
31524 switch_to_section (debug_line_section);
31525 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31526 if (! output_asm_line_debug_info ())
31527 output_line_info (false);
31528
31529 if (dwarf_split_debug_info && info_section_emitted)
31530 {
31531 switch_to_section (debug_skeleton_line_section);
31532 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31533 output_line_info (true);
31534 }
31535
31536 /* If we emitted any indirect strings, output the string table too. */
31537 if (debug_str_hash || skeleton_debug_str_hash)
31538 output_indirect_strings ();
31539 if (debug_line_str_hash)
31540 {
31541 switch_to_section (debug_line_str_section);
31542 const enum dwarf_form form = DW_FORM_line_strp;
31543 debug_line_str_hash->traverse<enum dwarf_form,
31544 output_indirect_string> (form);
31545 }
31546
31547 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31548 symview_upper_bound = 0;
31549 if (zero_view_p)
31550 bitmap_clear (zero_view_p);
31551 }
31552
31553 /* Returns a hash value for X (which really is a variable_value_struct). */
31554
31555 inline hashval_t
31556 variable_value_hasher::hash (variable_value_struct *x)
31557 {
31558 return (hashval_t) x->decl_id;
31559 }
31560
31561 /* Return true if the decl_id of variable_value_struct X is the same as
31562    the DECL_UID of decl Y.  */
31563
31564 inline bool
31565 variable_value_hasher::equal (variable_value_struct *x, tree y)
31566 {
31567 return x->decl_id == DECL_UID (y);
31568 }
31569
31570 /* Helper function for resolve_variable_value; handle
31571    DW_OP_GNU_variable_value in one location expression.
31572    Return true if the exprloc has been changed into a loclist.  */
31573
31574 static bool
31575 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31576 {
31577 dw_loc_descr_ref next;
31578 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31579 {
31580 next = loc->dw_loc_next;
31581 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31582 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31583 continue;
31584
31585 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31586 if (DECL_CONTEXT (decl) != current_function_decl)
31587 continue;
31588
31589 dw_die_ref ref = lookup_decl_die (decl);
31590 if (ref)
31591 {
31592 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31593 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31594 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31595 continue;
31596 }
31597 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31598 if (l == NULL)
31599 continue;
31600 if (l->dw_loc_next)
31601 {
31602 if (AT_class (a) != dw_val_class_loc)
31603 continue;
31604 switch (a->dw_attr)
31605 {
31606           /* The following attributes allow both exprloc and loclist
31607              classes, so we can change them into a loclist.  */
31608 case DW_AT_location:
31609 case DW_AT_string_length:
31610 case DW_AT_return_addr:
31611 case DW_AT_data_member_location:
31612 case DW_AT_frame_base:
31613 case DW_AT_segment:
31614 case DW_AT_static_link:
31615 case DW_AT_use_location:
31616 case DW_AT_vtable_elem_location:
31617 if (prev)
31618 {
31619 prev->dw_loc_next = NULL;
31620 prepend_loc_descr_to_each (l, AT_loc (a));
31621 }
31622 if (next)
31623 add_loc_descr_to_each (l, next);
31624 a->dw_attr_val.val_class = dw_val_class_loc_list;
31625 a->dw_attr_val.val_entry = NULL;
31626 a->dw_attr_val.v.val_loc_list = l;
31627 have_location_lists = true;
31628 return true;
31629           /* The following attributes allow both exprloc and reference,
31630              so if the whole expression is DW_OP_GNU_variable_value alone
31631              we can transform it into a reference.  */
31632 case DW_AT_byte_size:
31633 case DW_AT_bit_size:
31634 case DW_AT_lower_bound:
31635 case DW_AT_upper_bound:
31636 case DW_AT_bit_stride:
31637 case DW_AT_count:
31638 case DW_AT_allocated:
31639 case DW_AT_associated:
31640 case DW_AT_byte_stride:
31641 if (prev == NULL && next == NULL)
31642 break;
31643 /* FALLTHRU */
31644 default:
31645 if (dwarf_strict)
31646 continue;
31647 break;
31648 }
31649 /* Create DW_TAG_variable that we can refer to. */
31650 gen_decl_die (decl, NULL_TREE, NULL,
31651 lookup_decl_die (current_function_decl));
31652 ref = lookup_decl_die (decl);
31653 if (ref)
31654 {
31655 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31656 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31657 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31658 }
31659 continue;
31660 }
31661 if (prev)
31662 {
31663 prev->dw_loc_next = l->expr;
31664 add_loc_descr (&prev->dw_loc_next, next);
31665 free_loc_descr (loc, NULL);
31666 next = prev->dw_loc_next;
31667 }
31668 else
31669 {
31670 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31671 add_loc_descr (&loc, next);
31672 next = loc;
31673 }
31674 loc = prev;
31675 }
31676 return false;
31677 }
31678
31679 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31680
31681 static void
31682 resolve_variable_value (dw_die_ref die)
31683 {
31684 dw_attr_node *a;
31685 dw_loc_list_ref loc;
31686 unsigned ix;
31687
31688 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31689 switch (AT_class (a))
31690 {
31691 case dw_val_class_loc:
31692 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31693 break;
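	/* The exprloc has been changed into a location list; resolve
	   DW_OP_GNU_variable_value in the new list entries as well.  */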
31694 /* FALLTHRU */
31695 case dw_val_class_loc_list:
31696 loc = AT_loc_list (a);
31697 gcc_assert (loc);
31698 for (; loc; loc = loc->dw_loc_next)
31699 resolve_variable_value_in_expr (a, loc->expr);
31700 break;
31701 default:
31702 break;
31703 }
31704 }
31705
31706 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31707    temporaries in the current function.  */
31708
31709 static void
31710 resolve_variable_values (void)
31711 {
31712 if (!variable_value_hash || !current_function_decl)
31713 return;
31714
31715 struct variable_value_struct *node
31716 = variable_value_hash->find_with_hash (current_function_decl,
31717 DECL_UID (current_function_decl));
31718
31719 if (node == NULL)
31720 return;
31721
31722 unsigned int i;
31723 dw_die_ref die;
31724 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31725 resolve_variable_value (die);
31726 }
31727
31728 /* Helper function for note_variable_value; handle one location
31729    expression.  */
31730
31731 static void
31732 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31733 {
31734 for (; loc; loc = loc->dw_loc_next)
31735 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31736 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31737 {
31738 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31739 dw_die_ref ref = lookup_decl_die (decl);
31740 if (! ref && (flag_generate_lto || flag_generate_offload))
31741 {
31742           /* ??? This is somewhat of a hack because we do not create DIEs
31743              for variables not in BLOCK trees early, but when generating
31744              early LTO output we need the dw_val_class_decl_ref to be
31745              fully resolved.  For fat LTO objects we'd also like to
31746              undo this after LTO DWARF output.  */
31747 gcc_assert (DECL_CONTEXT (decl));
31748 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31749 gcc_assert (ctx != NULL);
31750 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31751 ref = lookup_decl_die (decl);
31752 gcc_assert (ref != NULL);
31753 }
31754 if (ref)
31755 {
31756 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31757 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31758 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31759 continue;
31760 }
31761 if (VAR_P (decl)
31762 && DECL_CONTEXT (decl)
31763 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31764 && lookup_decl_die (DECL_CONTEXT (decl)))
31765 {
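	  /* Remember the referencing DIE, keyed by the function that
	     contains DECL, so resolve_variable_values can retry when that
	     function is being compiled.  */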
31766 if (!variable_value_hash)
31767 variable_value_hash
31768 = hash_table<variable_value_hasher>::create_ggc (10);
31769
31770 tree fndecl = DECL_CONTEXT (decl);
31771 struct variable_value_struct *node;
31772 struct variable_value_struct **slot
31773 = variable_value_hash->find_slot_with_hash (fndecl,
31774 DECL_UID (fndecl),
31775 INSERT);
31776 if (*slot == NULL)
31777 {
31778 node = ggc_cleared_alloc<variable_value_struct> ();
31779 node->decl_id = DECL_UID (fndecl);
31780 *slot = node;
31781 }
31782 else
31783 node = *slot;
31784
31785 vec_safe_push (node->dies, die);
31786 }
31787 }
31788 }
31789
31790 /* Walk the DIE tree rooted at DIE and note DIEs whose
31791    DW_OP_GNU_variable_value still has a dw_val_class_decl_ref operand.  */
31792
31793 static void
31794 note_variable_value (dw_die_ref die)
31795 {
31796 dw_die_ref c;
31797 dw_attr_node *a;
31798 dw_loc_list_ref loc;
31799 unsigned ix;
31800
31801 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31802 switch (AT_class (a))
31803 {
31804 case dw_val_class_loc_list:
31805 loc = AT_loc_list (a);
31806 gcc_assert (loc);
31807 if (!loc->noted_variable_value)
31808 {
31809 loc->noted_variable_value = 1;
31810 for (; loc; loc = loc->dw_loc_next)
31811 note_variable_value_in_expr (die, loc->expr);
31812 }
31813 break;
31814 case dw_val_class_loc:
31815 note_variable_value_in_expr (die, AT_loc (a));
31816 break;
31817 default:
31818 break;
31819 }
31820
31821   /* Recurse into the children.  */
31822 FOR_EACH_CHILD (die, c, note_variable_value (c));
31823 }
31824
31825 /* Perform any cleanups needed after the early debug generation pass
31826 has run. */
31827
31828 static void
31829 dwarf2out_early_finish (const char *filename)
31830 {
31831 set_early_dwarf s;
31832 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31833
31834   /* PCH might result in the DW_AT_producer string being restored from the
31835      header compilation, so always fill it with an empty string initially
31836      and overwrite it only here.  */
31837 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31838 producer_string = gen_producer_string ();
31839 producer->dw_attr_val.v.val_str->refcount--;
31840 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31841
31842 /* Add the name for the main input file now. We delayed this from
31843 dwarf2out_init to avoid complications with PCH. */
31844 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31845 add_comp_dir_attribute (comp_unit_die ());
31846
31847 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31848 DW_AT_comp_dir into .debug_line_str section. */
31849 if (!dwarf2out_as_loc_support
31850 && dwarf_version >= 5
31851 && DWARF5_USE_DEBUG_LINE_STR)
31852 {
31853 for (int i = 0; i < 2; i++)
31854 {
31855 dw_attr_node *a = get_AT (comp_unit_die (),
31856 i ? DW_AT_comp_dir : DW_AT_name);
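	  /* Strings no longer than an offset are cheaper to emit inline than
	     via a reference into .debug_line_str, so skip them.  */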
31857 if (a == NULL
31858 || AT_class (a) != dw_val_class_str
31859 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31860 continue;
31861
31862 if (! debug_line_str_hash)
31863 debug_line_str_hash
31864 = hash_table<indirect_string_hasher>::create_ggc (10);
31865
31866 struct indirect_string_node *node
31867 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31868 set_indirect_string (node);
31869 node->form = DW_FORM_line_strp;
31870 a->dw_attr_val.v.val_str->refcount--;
31871 a->dw_attr_val.v.val_str = node;
31872 }
31873 }
31874
31875   /* With LTO, early dwarf was really finished at compile time, so make
31876      sure to adjust the phase after annotating the LTRANS CU DIE.  */
31877 if (in_lto_p)
31878 {
31879 early_dwarf_finished = true;
31880 return;
31881 }
31882
31883 /* Walk through the list of incomplete types again, trying once more to
31884 emit full debugging info for them. */
31885 retry_incomplete_types ();
31886
31887 /* The point here is to flush out the limbo list so that it is empty
31888 and we don't need to stream it for LTO. */
31889 flush_limbo_die_list ();
31890
31891 gen_scheduled_generic_parms_dies ();
31892 gen_remaining_tmpl_value_param_die_attribute ();
31893
31894 /* Add DW_AT_linkage_name for all deferred DIEs. */
31895 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31896 {
31897 tree decl = node->created_for;
31898 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31899 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31900 ended up in deferred_asm_name before we knew it was
31901 constant and never written to disk. */
31902 && DECL_ASSEMBLER_NAME (decl))
31903 {
31904 add_linkage_attr (node->die, decl);
31905 move_linkage_attr (node->die);
31906 }
31907 }
31908 deferred_asm_name = NULL;
31909
31910 if (flag_eliminate_unused_debug_types)
31911 prune_unused_types ();
31912
31913 /* Generate separate COMDAT sections for type DIEs. */
31914 if (use_debug_types)
31915 {
31916 break_out_comdat_types (comp_unit_die ());
31917
31918 /* Each new type_unit DIE was added to the limbo die list when created.
31919 Since these have all been added to comdat_type_list, clear the
31920 limbo die list. */
31921 limbo_die_list = NULL;
31922
31923 /* For each new comdat type unit, copy declarations for incomplete
31924 types to make the new unit self-contained (i.e., no direct
31925 references to the main compile unit). */
31926 for (comdat_type_node *ctnode = comdat_type_list;
31927 ctnode != NULL; ctnode = ctnode->next)
31928 copy_decls_for_unworthy_types (ctnode->root_die);
31929 copy_decls_for_unworthy_types (comp_unit_die ());
31930
31931 /* In the process of copying declarations from one unit to another,
31932 we may have left some declarations behind that are no longer
31933 referenced. Prune them. */
31934 prune_unused_types ();
31935 }
31936
31937   /* Traverse the DIEs and note those with DW_OP_GNU_variable_value that
31938      still have a dw_val_class_decl_ref operand.  */
31939 note_variable_value (comp_unit_die ());
31940 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31941 note_variable_value (node->die);
31942 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31943 ctnode = ctnode->next)
31944 note_variable_value (ctnode->root_die);
31945 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31946 note_variable_value (node->die);
31947
31948 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31949 both the main_cu and all skeleton TUs. Making this call unconditional
31950 would end up either adding a second copy of the AT_pubnames attribute, or
31951 requiring a special case in add_top_level_skeleton_die_attrs. */
31952 if (!dwarf_split_debug_info)
31953 add_AT_pubnames (comp_unit_die ());
31954
31955 /* The early debug phase is now finished. */
31956 early_dwarf_finished = true;
31957
31958 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
31959 if ((!flag_generate_lto && !flag_generate_offload)
31960 /* FIXME: Disable debug info generation for PE-COFF targets since the
31961 copy_lto_debug_sections operation of the simple object support in
31962 libiberty is not implemented for them yet. */
31963 || TARGET_PECOFF)
31964 return;
31965
31966   /* Now, as we are going to output for LTO, initialize sections and labels
31967      to the LTO variants.  We don't need a random-seed postfix as other
31968      LTO sections do, since linking the LTO debug sections into one in a
31969      partial link is fine.  */
31970 init_sections_and_labels (true);
31971
31972 /* The output below is modeled after dwarf2out_finish with all
31973 location related output removed and some LTO specific changes.
31974 Some refactoring might make both smaller and easier to match up. */
31975
31976   /* Traverse the DIEs and add sibling attributes to those DIEs
31977      that have children.  */
31978 add_sibling_attributes (comp_unit_die ());
31979 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31980 add_sibling_attributes (node->die);
31981 for (comdat_type_node *ctnode = comdat_type_list;
31982 ctnode != NULL; ctnode = ctnode->next)
31983 add_sibling_attributes (ctnode->root_die);
31984
31985 /* AIX Assembler inserts the length, so adjust the reference to match the
31986 offset expected by debuggers. */
31987 strcpy (dl_section_ref, debug_line_section_label);
31988 if (XCOFF_DEBUGGING_INFO)
31989 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31990
31991 if (debug_info_level >= DINFO_LEVEL_TERSE)
31992 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
31993
31994 if (have_macinfo)
31995 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31996 macinfo_section_label);
31997
31998 save_macinfo_strings ();
31999
32000 if (dwarf_split_debug_info)
32001 {
32002 unsigned int index = 0;
32003 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32004 }
32005
32006 /* Output all of the compilation units. We put the main one last so that
32007 the offsets are available to output_pubnames. */
32008 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32009 output_comp_unit (node->die, 0, NULL);
32010
32011 hash_table<comdat_type_hasher> comdat_type_table (100);
32012 for (comdat_type_node *ctnode = comdat_type_list;
32013 ctnode != NULL; ctnode = ctnode->next)
32014 {
32015 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32016
32017 /* Don't output duplicate types. */
32018 if (*slot != HTAB_EMPTY_ENTRY)
32019 continue;
32020
32021 /* Add a pointer to the line table for the main compilation unit
32022 so that the debugger can make sense of DW_AT_decl_file
32023 attributes. */
32024 if (debug_info_level >= DINFO_LEVEL_TERSE)
32025 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32026 (!dwarf_split_debug_info
32027 ? debug_line_section_label
32028 : debug_skeleton_line_section_label));
32029
32030 output_comdat_type_unit (ctnode);
32031 *slot = ctnode;
32032 }
32033
32034   /* Attach a unique symbol to the main debug info section.  */
32035 compute_comp_unit_symbol (comp_unit_die ());
32036
32037 /* Output the main compilation unit. We always need it if only for
32038 the CU symbol. */
32039 output_comp_unit (comp_unit_die (), true, NULL);
32040
32041 /* Output the abbreviation table. */
32042 if (vec_safe_length (abbrev_die_table) != 1)
32043 {
32044 switch_to_section (debug_abbrev_section);
32045 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32046 output_abbrev_section ();
32047 }
32048
32049 /* Have to end the macro section. */
32050 if (have_macinfo)
32051 {
32052 /* We have to save macinfo state if we need to output it again
32053 for the FAT part of the object. */
32054 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32055 if (flag_fat_lto_objects)
32056 macinfo_table = macinfo_table->copy ();
32057
32058 switch_to_section (debug_macinfo_section);
32059 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32060 output_macinfo (debug_line_section_label, true);
32061 dw2_asm_output_data (1, 0, "End compilation unit");
32062
32063 if (flag_fat_lto_objects)
32064 {
32065 vec_free (macinfo_table);
32066 macinfo_table = saved_macinfo_table;
32067 }
32068 }
32069
32070 /* Emit a skeleton debug_line section. */
32071 switch_to_section (debug_line_section);
32072 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32073 output_line_info (true);
32074
32075 /* If we emitted any indirect strings, output the string table too. */
32076 if (debug_str_hash || skeleton_debug_str_hash)
32077 output_indirect_strings ();
32078
32079 /* Switch back to the text section. */
32080 switch_to_section (text_section);
32081 }
32082
32083 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32084 within the same process. For use by toplev::finalize. */
32085
32086 void
32087 dwarf2out_c_finalize (void)
32088 {
32089 last_var_location_insn = NULL;
32090 cached_next_real_insn = NULL;
32091 used_rtx_array = NULL;
32092 incomplete_types = NULL;
32093 decl_scope_table = NULL;
32094 debug_info_section = NULL;
32095 debug_skeleton_info_section = NULL;
32096 debug_abbrev_section = NULL;
32097 debug_skeleton_abbrev_section = NULL;
32098 debug_aranges_section = NULL;
32099 debug_addr_section = NULL;
32100 debug_macinfo_section = NULL;
32101 debug_line_section = NULL;
32102 debug_skeleton_line_section = NULL;
32103 debug_loc_section = NULL;
32104 debug_pubnames_section = NULL;
32105 debug_pubtypes_section = NULL;
32106 debug_str_section = NULL;
32107 debug_line_str_section = NULL;
32108 debug_str_dwo_section = NULL;
32109 debug_str_offsets_section = NULL;
32110 debug_ranges_section = NULL;
32111 debug_frame_section = NULL;
32112 fde_vec = NULL;
32113 debug_str_hash = NULL;
32114 debug_line_str_hash = NULL;
32115 skeleton_debug_str_hash = NULL;
32116 dw2_string_counter = 0;
32117 have_multiple_function_sections = false;
32118 text_section_used = false;
32119 cold_text_section_used = false;
32120 cold_text_section = NULL;
32121 current_unit_personality = NULL;
32122
32123 early_dwarf = false;
32124 early_dwarf_finished = false;
32125
32126 next_die_offset = 0;
32127 single_comp_unit_die = NULL;
32128 comdat_type_list = NULL;
32129 limbo_die_list = NULL;
32130 file_table = NULL;
32131 decl_die_table = NULL;
32132 common_block_die_table = NULL;
32133 decl_loc_table = NULL;
32134 call_arg_locations = NULL;
32135 call_arg_loc_last = NULL;
32136 call_site_count = -1;
32137 tail_call_site_count = -1;
32138 cached_dw_loc_list_table = NULL;
32139 abbrev_die_table = NULL;
32140 delete dwarf_proc_stack_usage_map;
32141 dwarf_proc_stack_usage_map = NULL;
32142 line_info_label_num = 0;
32143 cur_line_info_table = NULL;
32144 text_section_line_info = NULL;
32145 cold_text_section_line_info = NULL;
32146 separate_line_info = NULL;
32147 info_section_emitted = false;
32148 pubname_table = NULL;
32149 pubtype_table = NULL;
32150 macinfo_table = NULL;
32151 ranges_table = NULL;
32152 ranges_by_label = NULL;
32153 rnglist_idx = 0;
32154 have_location_lists = false;
32155 loclabel_num = 0;
32156 poc_label_num = 0;
32157 last_emitted_file = NULL;
32158 label_num = 0;
32159 tmpl_value_parm_die_table = NULL;
32160 generic_type_instances = NULL;
32161 frame_pointer_fb_offset = 0;
32162 frame_pointer_fb_offset_valid = false;
32163 base_types.release ();
32164 XDELETEVEC (producer_string);
32165 producer_string = NULL;
32166 }
32167
32168 #include "gt-dwarf2out.h"