1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information that is common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
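/* Editorial sketch, not part of the original source: on a target where the
   stack grows downward and the call insn pushes the return address, the CFA
   of a frame is the value SP had just before that call.  A hypothetical
   layout with 8-byte slots:

       higher addresses
       +------------------+  <-- CFA (SP just before the call insn)
       |  return address  |      at CFA - 8
       |  saved FP        |      at CFA - 16
       |  callee locals   |
       lower addresses

   Unwind info then describes the CFA as "some register + offset" (which may
   change as the prologue runs) and saved registers as offsets from the CFA.
   The offsets above are illustrative only; real values depend on the ABI.  */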
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105
106 #ifndef XCOFF_DEBUGGING_INFO
107 #define XCOFF_DEBUGGING_INFO 0
108 #endif
109
110 #ifndef HAVE_XCOFF_DWARF_EXTRAS
111 #define HAVE_XCOFF_DWARF_EXTRAS 0
112 #endif
113
114 #ifdef VMS_DEBUGGING_INFO
115 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
116
117 /* Define this macro to be a nonzero value if the directory specifications
118 which are output in the debug info should end with a separator. */
119 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
120 /* Define this macro to evaluate to a nonzero value if GCC should refrain
121 from generating indirect strings in DWARF2 debug information, for instance
122 if your target is stuck with an old version of GDB that is unable to
123 process them properly or uses VMS Debug. */
124 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
125 #else
126 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
127 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
128 #endif
129
130 /* ??? Poison these here until it can be done generically. They've been
131 totally replaced in this file; make sure it stays that way. */
132 #undef DWARF2_UNWIND_INFO
133 #undef DWARF2_FRAME_INFO
134 #if (GCC_VERSION >= 3000)
135 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
136 #endif
137
138 /* The size of the target's pointer type. */
139 #ifndef PTR_SIZE
140 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
141 #endif
142
143 /* Array of RTXes referenced by the debugging information, which therefore
144 must be kept around forever. */
145 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
146
147 /* A pointer to the base of a list of incomplete types which might be
148 completed at some later time. incomplete_types_list needs to be a
149 vec<tree, va_gc> * because we want to tell the garbage collector about
150 it. */
151 static GTY(()) vec<tree, va_gc> *incomplete_types;
152
153 /* Pointers to various DWARF2 sections. */
154 static GTY(()) section *debug_info_section;
155 static GTY(()) section *debug_skeleton_info_section;
156 static GTY(()) section *debug_abbrev_section;
157 static GTY(()) section *debug_skeleton_abbrev_section;
158 static GTY(()) section *debug_aranges_section;
159 static GTY(()) section *debug_addr_section;
160 static GTY(()) section *debug_macinfo_section;
161 static const char *debug_macinfo_section_name;
162 static unsigned macinfo_label_base = 1;
163 static GTY(()) section *debug_line_section;
164 static GTY(()) section *debug_skeleton_line_section;
165 static GTY(()) section *debug_loc_section;
166 static GTY(()) section *debug_pubnames_section;
167 static GTY(()) section *debug_pubtypes_section;
168 static GTY(()) section *debug_str_section;
169 static GTY(()) section *debug_line_str_section;
170 static GTY(()) section *debug_str_dwo_section;
171 static GTY(()) section *debug_str_offsets_section;
172 static GTY(()) section *debug_ranges_section;
173 static GTY(()) section *debug_frame_section;
174
175 /* Maximum size (in bytes) of an artificially generated label. */
176 #define MAX_ARTIFICIAL_LABEL_BYTES 40
177
178 /* According to the (draft) DWARF 3 specification, the initial length
179 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
180 bytes are 0xffffffff, followed by the length stored in the next 8
181 bytes.
182
183 However, the SGI/MIPS ABI uses an initial length which is equal to
184 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
185
186 #ifndef DWARF_INITIAL_LENGTH_SIZE
187 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
188 #endif
189
190 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
191 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
192 #endif
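/* Editorial example, not part of the original source: with 32-bit DWARF
   (DWARF_OFFSET_SIZE == 4) a unit whose remaining length is 0x52 starts with
   the four bytes 52 00 00 00 on a little-endian target.  With 64-bit DWARF
   the same unit starts with the escape ff ff ff ff followed by the 8-byte
   length 52 00 00 00 00 00 00 00, i.e. the 12 bytes that
   DWARF_INITIAL_LENGTH_SIZE evaluates to in that case.  */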
193
194 /* Round SIZE up to the nearest BOUNDARY. */
195 #define DWARF_ROUND(SIZE,BOUNDARY) \
196 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
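/* Editorial example, not part of the original source: DWARF_ROUND rounds
   SIZE up to the next multiple of BOUNDARY, so for a 4-byte boundary
       DWARF_ROUND (5, 4) == 8
       DWARF_ROUND (8, 4) == 8
       DWARF_ROUND (0, 4) == 0
   assuming BOUNDARY is a positive integer.  */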
197
198 /* CIE identifier. */
199 #if HOST_BITS_PER_WIDE_INT >= 64
200 #define DWARF_CIE_ID \
201 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
202 #else
203 #define DWARF_CIE_ID DW_CIE_ID
204 #endif
205
206
207 /* A vector for a table that contains frame description
208 information for each routine. */
209 #define NOT_INDEXED (-1U)
210 #define NO_INDEX_ASSIGNED (-2U)
211
212 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
213
214 struct GTY((for_user)) indirect_string_node {
215 const char *str;
216 unsigned int refcount;
217 enum dwarf_form form;
218 char *label;
219 unsigned int index;
220 };
221
222 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
223 {
224 typedef const char *compare_type;
225
226 static hashval_t hash (indirect_string_node *);
227 static bool equal (indirect_string_node *, const char *);
228 };
229
230 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
231
232 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
233
234 /* With split_debug_info, both the comp_dir and dwo_name go in the
235 main object file, rather than the dwo, similar to the force_direct
236 parameter elsewhere but with additional complications:
237
238 1) The string is needed in both the main object file and the dwo.
239 That is, the comp_dir and dwo_name will appear in both places.
240
241 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
242 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
243
244 3) GCC chooses the form to use late, depending on the size and
245 reference count.
246
247 Rather than forcing all the debug string handling functions and
248 callers to deal with these complications, simply use a separate,
249 special-cased string table for any attribute that should go in the
250 main object file. This limits the complexity to just the places
251 that need it. */
252
253 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
254
255 static GTY(()) int dw2_string_counter;
256
257 /* True if the compilation unit places functions in more than one section. */
258 static GTY(()) bool have_multiple_function_sections = false;
259
260 /* Whether the default text and cold text sections have been used at all. */
261 static GTY(()) bool text_section_used = false;
262 static GTY(()) bool cold_text_section_used = false;
263
264 /* The default cold text section. */
265 static GTY(()) section *cold_text_section;
266
267 /* The DIE for C++14 'auto' in a function return type. */
268 static GTY(()) dw_die_ref auto_die;
269
270 /* The DIE for C++14 'decltype(auto)' in a function return type. */
271 static GTY(()) dw_die_ref decltype_auto_die;
272
273 /* Forward declarations for functions defined in this file. */
274
275 static void output_call_frame_info (int);
276 static void dwarf2out_note_section_used (void);
277
278 /* Personality decl of current unit. Used only when assembler does not support
279 personality CFI. */
280 static GTY(()) rtx current_unit_personality;
281
282 /* Whether an eh_frame section is required. */
283 static GTY(()) bool do_eh_frame = false;
284
285 /* .debug_rnglists next index. */
286 static unsigned int rnglist_idx;
287
288 /* Data and reference forms for relocatable data. */
289 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
290 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
291
292 #ifndef DEBUG_FRAME_SECTION
293 #define DEBUG_FRAME_SECTION ".debug_frame"
294 #endif
295
296 #ifndef FUNC_BEGIN_LABEL
297 #define FUNC_BEGIN_LABEL "LFB"
298 #endif
299
300 #ifndef FUNC_END_LABEL
301 #define FUNC_END_LABEL "LFE"
302 #endif
303
304 #ifndef PROLOGUE_END_LABEL
305 #define PROLOGUE_END_LABEL "LPE"
306 #endif
307
308 #ifndef EPILOGUE_BEGIN_LABEL
309 #define EPILOGUE_BEGIN_LABEL "LEB"
310 #endif
311
312 #ifndef FRAME_BEGIN_LABEL
313 #define FRAME_BEGIN_LABEL "Lframe"
314 #endif
315 #define CIE_AFTER_SIZE_LABEL "LSCIE"
316 #define CIE_END_LABEL "LECIE"
317 #define FDE_LABEL "LSFDE"
318 #define FDE_AFTER_SIZE_LABEL "LASFDE"
319 #define FDE_END_LABEL "LEFDE"
320 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
321 #define LINE_NUMBER_END_LABEL "LELT"
322 #define LN_PROLOG_AS_LABEL "LASLTP"
323 #define LN_PROLOG_END_LABEL "LELTP"
324 #define DIE_LABEL_PREFIX "DW"
325 \f
326 /* Match the base name of a file to the base name of a compilation unit. */
327
328 static int
329 matches_main_base (const char *path)
330 {
331 /* Cache the last query. */
332 static const char *last_path = NULL;
333 static int last_match = 0;
334 if (path != last_path)
335 {
336 const char *base;
337 int length = base_of_path (path, &base);
338 last_path = path;
339 last_match = (length == main_input_baselength
340 && memcmp (base, main_input_basename, length) == 0);
341 }
342 return last_match;
343 }
344
345 #ifdef DEBUG_DEBUG_STRUCT
346
347 static int
348 dump_struct_debug (tree type, enum debug_info_usage usage,
349 enum debug_struct_file criterion, int generic,
350 int matches, int result)
351 {
352 /* Find the type name. */
353 tree type_decl = TYPE_STUB_DECL (type);
354 tree t = type_decl;
355 const char *name = 0;
356 if (TREE_CODE (t) == TYPE_DECL)
357 t = DECL_NAME (t);
358 if (t)
359 name = IDENTIFIER_POINTER (t);
360
361 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
362 criterion,
363 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
364 matches ? "bas" : "hdr",
365 generic ? "gen" : "ord",
366 usage == DINFO_USAGE_DFN ? ";" :
367 usage == DINFO_USAGE_DIR_USE ? "." : "*",
368 result,
369 (void*) type_decl, name);
370 return result;
371 }
372 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
373 dump_struct_debug (type, usage, criterion, generic, matches, result)
374
375 #else
376
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 (result)
379
380 #endif
381
382 /* Get the number of HOST_WIDE_INTs needed to represent the precision
383 of the number. Some constants have a large uniform precision, so
384 we get the precision needed for the actual value of the number. */
385
386 static unsigned int
387 get_full_len (const wide_int &op)
388 {
389 int prec = wi::min_precision (op, UNSIGNED);
390 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
391 / HOST_BITS_PER_WIDE_INT);
392 }
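/* Editorial example, not part of the original source: with 64-bit
   HOST_WIDE_INTs, a value whose minimum unsigned precision is 70 bits needs
   (70 + 63) / 64 == 2 HOST_WIDE_INTs, while anything up to 64 bits needs
   just one.  */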
393
394 static bool
395 should_emit_struct_debug (tree type, enum debug_info_usage usage)
396 {
397 enum debug_struct_file criterion;
398 tree type_decl;
399 bool generic = lang_hooks.types.generic_p (type);
400
401 if (generic)
402 criterion = debug_struct_generic[usage];
403 else
404 criterion = debug_struct_ordinary[usage];
405
406 if (criterion == DINFO_STRUCT_FILE_NONE)
407 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
408 if (criterion == DINFO_STRUCT_FILE_ANY)
409 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
410
411 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
412
413 if (type_decl != NULL)
414 {
415 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
416 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
417
418 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
419 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
420 }
421
422 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
423 }
424 \f
425 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
426 switch to the data section instead, and write out a synthetic start label
427 for collect2 the first time around. */
428
429 static void
430 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
431 {
432 if (eh_frame_section == 0)
433 {
434 int flags;
435
436 if (EH_TABLES_CAN_BE_READ_ONLY)
437 {
438 int fde_encoding;
439 int per_encoding;
440 int lsda_encoding;
441
442 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
443 /*global=*/0);
444 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
445 /*global=*/1);
446 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
447 /*global=*/0);
448 flags = ((! flag_pic
449 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
450 && (fde_encoding & 0x70) != DW_EH_PE_aligned
451 && (per_encoding & 0x70) != DW_EH_PE_absptr
452 && (per_encoding & 0x70) != DW_EH_PE_aligned
453 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
454 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
455 ? 0 : SECTION_WRITE);
456 }
457 else
458 flags = SECTION_WRITE;
459
460 #ifdef EH_FRAME_SECTION_NAME
461 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
462 #else
463 eh_frame_section = ((flags == SECTION_WRITE)
464 ? data_section : readonly_data_section);
465 #endif /* EH_FRAME_SECTION_NAME */
466 }
467
468 switch_to_section (eh_frame_section);
469
470 #ifdef EH_FRAME_THROUGH_COLLECT2
471 /* We have no special eh_frame section. Emit special labels to guide
472 collect2. */
473 if (!back)
474 {
475 tree label = get_file_function_name ("F");
476 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
477 targetm.asm_out.globalize_label (asm_out_file,
478 IDENTIFIER_POINTER (label));
479 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
480 }
481 #endif
482 }
483
484 /* Switch [BACK] to the eh or debug frame table section, depending on
485 FOR_EH. */
486
487 static void
488 switch_to_frame_table_section (int for_eh, bool back)
489 {
490 if (for_eh)
491 switch_to_eh_frame_section (back);
492 else
493 {
494 if (!debug_frame_section)
495 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
496 SECTION_DEBUG, NULL);
497 switch_to_section (debug_frame_section);
498 }
499 }
500
501 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
502
503 enum dw_cfi_oprnd_type
504 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
505 {
506 switch (cfi)
507 {
508 case DW_CFA_nop:
509 case DW_CFA_GNU_window_save:
510 case DW_CFA_remember_state:
511 case DW_CFA_restore_state:
512 return dw_cfi_oprnd_unused;
513
514 case DW_CFA_set_loc:
515 case DW_CFA_advance_loc1:
516 case DW_CFA_advance_loc2:
517 case DW_CFA_advance_loc4:
518 case DW_CFA_MIPS_advance_loc8:
519 return dw_cfi_oprnd_addr;
520
521 case DW_CFA_offset:
522 case DW_CFA_offset_extended:
523 case DW_CFA_def_cfa:
524 case DW_CFA_offset_extended_sf:
525 case DW_CFA_def_cfa_sf:
526 case DW_CFA_restore:
527 case DW_CFA_restore_extended:
528 case DW_CFA_undefined:
529 case DW_CFA_same_value:
530 case DW_CFA_def_cfa_register:
531 case DW_CFA_register:
532 case DW_CFA_expression:
533 case DW_CFA_val_expression:
534 return dw_cfi_oprnd_reg_num;
535
536 case DW_CFA_def_cfa_offset:
537 case DW_CFA_GNU_args_size:
538 case DW_CFA_def_cfa_offset_sf:
539 return dw_cfi_oprnd_offset;
540
541 case DW_CFA_def_cfa_expression:
542 return dw_cfi_oprnd_loc;
543
544 default:
545 gcc_unreachable ();
546 }
547 }
548
549 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
550
551 enum dw_cfi_oprnd_type
552 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
553 {
554 switch (cfi)
555 {
556 case DW_CFA_def_cfa:
557 case DW_CFA_def_cfa_sf:
558 case DW_CFA_offset:
559 case DW_CFA_offset_extended_sf:
560 case DW_CFA_offset_extended:
561 return dw_cfi_oprnd_offset;
562
563 case DW_CFA_register:
564 return dw_cfi_oprnd_reg_num;
565
566 case DW_CFA_expression:
567 case DW_CFA_val_expression:
568 return dw_cfi_oprnd_loc;
569
570 case DW_CFA_def_cfa_expression:
571 return dw_cfi_oprnd_cfa_loc;
572
573 default:
574 return dw_cfi_oprnd_unused;
575 }
576 }
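/* Editorial example, not part of the original source: DW_CFA_offset says
   "register R was saved at CFA + N * data alignment factor"; its first
   operand is the register number (dw_cfi_oprnd_reg_num) and its second the
   factored offset N (dw_cfi_oprnd_offset).  DW_CFA_def_cfa_expression
   instead carries a whole location expression, which is why its operands
   are described above as dw_cfi_oprnd_loc and dw_cfi_oprnd_cfa_loc.  */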
577
578 /* Output one FDE. */
579
580 static void
581 output_fde (dw_fde_ref fde, bool for_eh, bool second,
582 char *section_start_label, int fde_encoding, char *augmentation,
583 bool any_lsda_needed, int lsda_encoding)
584 {
585 const char *begin, *end;
586 static unsigned int j;
587 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
588
589 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
590 /* empty */ 0);
591 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
592 for_eh + j);
593 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
594 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
595 if (!XCOFF_DEBUGGING_INFO || for_eh)
596 {
597 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
598 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
599 " indicating 64-bit DWARF extension");
600 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
601 "FDE Length");
602 }
603 ASM_OUTPUT_LABEL (asm_out_file, l1);
604
605 if (for_eh)
606 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
607 else
608 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
609 debug_frame_section, "FDE CIE offset");
610
611 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
612 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
613
614 if (for_eh)
615 {
616 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
617 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
618 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
619 "FDE initial location");
620 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
621 end, begin, "FDE address range");
622 }
623 else
624 {
625 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
626 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
627 }
628
629 if (augmentation[0])
630 {
631 if (any_lsda_needed)
632 {
633 int size = size_of_encoded_value (lsda_encoding);
634
635 if (lsda_encoding == DW_EH_PE_aligned)
636 {
637 int offset = ( 4 /* Length */
638 + 4 /* CIE offset */
639 + 2 * size_of_encoded_value (fde_encoding)
640 + 1 /* Augmentation size */ );
641 int pad = -offset & (PTR_SIZE - 1);
642
643 size += pad;
644 gcc_assert (size_of_uleb128 (size) == 1);
645 }
646
647 dw2_asm_output_data_uleb128 (size, "Augmentation size");
648
649 if (fde->uses_eh_lsda)
650 {
651 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
652 fde->funcdef_number);
653 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
654 gen_rtx_SYMBOL_REF (Pmode, l1),
655 false,
656 "Language Specific Data Area");
657 }
658 else
659 {
660 if (lsda_encoding == DW_EH_PE_aligned)
661 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
662 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
663 "Language Specific Data Area (none)");
664 }
665 }
666 else
667 dw2_asm_output_data_uleb128 (0, "Augmentation size");
668 }
669
670 /* Loop through the Call Frame Instructions associated with this FDE. */
671 fde->dw_fde_current_label = begin;
672 {
673 size_t from, until, i;
674
675 from = 0;
676 until = vec_safe_length (fde->dw_fde_cfi);
677
678 if (fde->dw_fde_second_begin == NULL)
679 ;
680 else if (!second)
681 until = fde->dw_fde_switch_cfi_index;
682 else
683 from = fde->dw_fde_switch_cfi_index;
684
685 for (i = from; i < until; i++)
686 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
687 }
688
689 /* If we are to emit a ref/link from function bodies to their frame tables,
690 do it now. This is typically performed to make sure that tables
691 associated with functions are dragged with them and not discarded in
692 garbage-collecting links. We need to do this on a per-function basis to
693 cope with -ffunction-sections. */
694
695 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
696 /* Switch to the function section, emit the ref to the tables, and
697 switch *back* into the table section. */
698 switch_to_section (function_section (fde->decl));
699 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
700 switch_to_frame_table_section (for_eh, true);
701 #endif
702
703 /* Pad the FDE out to an address sized boundary. */
704 ASM_OUTPUT_ALIGN (asm_out_file,
705 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
706 ASM_OUTPUT_LABEL (asm_out_file, l2);
707
708 j += 2;
709 }
710
711 /* Return true if frame description entry FDE is needed for EH. */
712
713 static bool
714 fde_needed_for_eh_p (dw_fde_ref fde)
715 {
716 if (flag_asynchronous_unwind_tables)
717 return true;
718
719 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
720 return true;
721
722 if (fde->uses_eh_lsda)
723 return true;
724
725 /* If exceptions are enabled, we have collected nothrow info. */
726 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
727 return false;
728
729 return true;
730 }
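/* Editorial example, not part of the original source: with -fexceptions but
   without -fasynchronous-unwind-tables, a function known to be nothrow (and
   neither using an LSDA nor weak on a TARGET_USES_WEAK_UNWIND_INFO target)
   gets no EH FDE here, since nothing will ever unwind through it.  */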
731
732 /* Output the call frame information used to record information
733 that relates to calculating the frame pointer, and records the
734 location of saved registers. */
735
736 static void
737 output_call_frame_info (int for_eh)
738 {
739 unsigned int i;
740 dw_fde_ref fde;
741 dw_cfi_ref cfi;
742 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
743 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
744 bool any_lsda_needed = false;
745 char augmentation[6];
746 int augmentation_size;
747 int fde_encoding = DW_EH_PE_absptr;
748 int per_encoding = DW_EH_PE_absptr;
749 int lsda_encoding = DW_EH_PE_absptr;
750 int return_reg;
751 rtx personality = NULL;
752 int dw_cie_version;
753
754 /* Don't emit a CIE if there won't be any FDEs. */
755 if (!fde_vec)
756 return;
757
758 /* Nothing to do if the assembler's doing it all. */
759 if (dwarf2out_do_cfi_asm ())
760 return;
761
762 /* If we don't have any functions we'll want to unwind out of, don't emit
763 any EH unwind information. If we make FDEs linkonce, we may have to
764 emit an empty label for an FDE that wouldn't otherwise be emitted. We
765 want to avoid having an FDE kept around when the function it refers to
766 is discarded. Example where this matters: a primary function template
767 in C++ requires EH information, an explicit specialization doesn't. */
768 if (for_eh)
769 {
770 bool any_eh_needed = false;
771
772 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
773 {
774 if (fde->uses_eh_lsda)
775 any_eh_needed = any_lsda_needed = true;
776 else if (fde_needed_for_eh_p (fde))
777 any_eh_needed = true;
778 else if (TARGET_USES_WEAK_UNWIND_INFO)
779 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
780 }
781
782 if (!any_eh_needed)
783 return;
784 }
785
786 /* We're going to be generating comments, so turn on app. */
787 if (flag_debug_asm)
788 app_enable ();
789
790 /* Switch to the proper frame section, first time. */
791 switch_to_frame_table_section (for_eh, false);
792
793 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
794 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
795
796 /* Output the CIE. */
797 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
798 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
799 if (!XCOFF_DEBUGGING_INFO || for_eh)
800 {
801 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
802 dw2_asm_output_data (4, 0xffffffff,
803 "Initial length escape value indicating 64-bit DWARF extension");
804 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
805 "Length of Common Information Entry");
806 }
807 ASM_OUTPUT_LABEL (asm_out_file, l1);
808
809 /* Now that the CIE pointer is PC-relative for EH,
810 use 0 to identify the CIE. */
811 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
812 (for_eh ? 0 : DWARF_CIE_ID),
813 "CIE Identifier Tag");
814
815 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
816 use CIE version 1, unless that would produce incorrect results
817 due to overflowing the return register column. */
818 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
819 dw_cie_version = 1;
820 if (return_reg >= 256 || dwarf_version > 2)
821 dw_cie_version = 3;
822 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
823
824 augmentation[0] = 0;
825 augmentation_size = 0;
826
827 personality = current_unit_personality;
828 if (for_eh)
829 {
830 char *p;
831
832 /* Augmentation:
833 z Indicates that a uleb128 is present to size the
834 augmentation section.
835 L Indicates the encoding (and thus presence) of
836 an LSDA pointer in the FDE augmentation.
837 R Indicates a non-default pointer encoding for
838 FDE code pointers.
839 P Indicates the presence of an encoding + language
840 personality routine in the CIE augmentation. */
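      /* Editorial example, not part of the original source: a unit whose
         functions have both a personality routine and LSDAs ends up with the
         augmentation string "zPLR" built below; the CIE augmentation data
         then holds, in that order, the personality encoding and pointer (P),
         the LSDA encoding (L) and the FDE pointer encoding (R), preceded by
         its uleb128 size (z).  */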
841
842 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
843 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
844 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
845
846 p = augmentation + 1;
847 if (personality)
848 {
849 *p++ = 'P';
850 augmentation_size += 1 + size_of_encoded_value (per_encoding);
851 assemble_external_libcall (personality);
852 }
853 if (any_lsda_needed)
854 {
855 *p++ = 'L';
856 augmentation_size += 1;
857 }
858 if (fde_encoding != DW_EH_PE_absptr)
859 {
860 *p++ = 'R';
861 augmentation_size += 1;
862 }
863 if (p > augmentation + 1)
864 {
865 augmentation[0] = 'z';
866 *p = '\0';
867 }
868
869 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
870 if (personality && per_encoding == DW_EH_PE_aligned)
871 {
872 int offset = ( 4 /* Length */
873 + 4 /* CIE Id */
874 + 1 /* CIE version */
875 + strlen (augmentation) + 1 /* Augmentation */
876 + size_of_uleb128 (1) /* Code alignment */
877 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
878 + 1 /* RA column */
879 + 1 /* Augmentation size */
880 + 1 /* Personality encoding */ );
881 int pad = -offset & (PTR_SIZE - 1);
882
883 augmentation_size += pad;
884
885 /* Augmentations should be small, so there's scarce need to
886 iterate for a solution. Die if we exceed one uleb128 byte. */
887 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
888 }
889 }
890
891 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
892 if (dw_cie_version >= 4)
893 {
894 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
895 dw2_asm_output_data (1, 0, "CIE Segment Size");
896 }
897 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
898 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
899 "CIE Data Alignment Factor");
900
901 if (dw_cie_version == 1)
902 dw2_asm_output_data (1, return_reg, "CIE RA Column");
903 else
904 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
905
906 if (augmentation[0])
907 {
908 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
909 if (personality)
910 {
911 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
912 eh_data_format_name (per_encoding));
913 dw2_asm_output_encoded_addr_rtx (per_encoding,
914 personality,
915 true, NULL);
916 }
917
918 if (any_lsda_needed)
919 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
920 eh_data_format_name (lsda_encoding));
921
922 if (fde_encoding != DW_EH_PE_absptr)
923 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
924 eh_data_format_name (fde_encoding));
925 }
926
927 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
928 output_cfi (cfi, NULL, for_eh);
929
930 /* Pad the CIE out to an address sized boundary. */
931 ASM_OUTPUT_ALIGN (asm_out_file,
932 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
933 ASM_OUTPUT_LABEL (asm_out_file, l2);
934
935 /* Loop through all of the FDEs. */
936 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
937 {
938 unsigned int k;
939
940 /* Don't emit EH unwind info for leaf functions that don't need it. */
941 if (for_eh && !fde_needed_for_eh_p (fde))
942 continue;
943
944 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
945 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
946 augmentation, any_lsda_needed, lsda_encoding);
947 }
948
949 if (for_eh && targetm.terminate_dw2_eh_frame_info)
950 dw2_asm_output_data (4, 0, "End of Table");
951
952 /* Turn off app to make assembly quicker. */
953 if (flag_debug_asm)
954 app_disable ();
955 }
956
957 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
958
959 static void
960 dwarf2out_do_cfi_startproc (bool second)
961 {
962 int enc;
963 rtx ref;
964
965 fprintf (asm_out_file, "\t.cfi_startproc\n");
966
967 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
968 eh unwinders. */
969 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
970 return;
971
972 rtx personality = get_personality_function (current_function_decl);
973
974 if (personality)
975 {
976 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
977 ref = personality;
978
979 /* ??? The GAS support isn't entirely consistent. We have to
980 handle indirect support ourselves, but PC-relative is done
981 in the assembler. Further, the assembler can't handle any
982 of the weirder relocation types. */
983 if (enc & DW_EH_PE_indirect)
984 ref = dw2_force_const_mem (ref, true);
985
986 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
987 output_addr_const (asm_out_file, ref);
988 fputc ('\n', asm_out_file);
989 }
990
991 if (crtl->uses_eh_lsda)
992 {
993 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
994
995 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
996 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
997 current_function_funcdef_no);
998 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
999 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1000
1001 if (enc & DW_EH_PE_indirect)
1002 ref = dw2_force_const_mem (ref, true);
1003
1004 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1005 output_addr_const (asm_out_file, ref);
1006 fputc ('\n', asm_out_file);
1007 }
1008 }
1009
1010 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1011 this allocation may be done before pass_final. */
1012
1013 dw_fde_ref
1014 dwarf2out_alloc_current_fde (void)
1015 {
1016 dw_fde_ref fde;
1017
1018 fde = ggc_cleared_alloc<dw_fde_node> ();
1019 fde->decl = current_function_decl;
1020 fde->funcdef_number = current_function_funcdef_no;
1021 fde->fde_index = vec_safe_length (fde_vec);
1022 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1023 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1024 fde->nothrow = crtl->nothrow;
1025 fde->drap_reg = INVALID_REGNUM;
1026 fde->vdrap_reg = INVALID_REGNUM;
1027
1028 /* Record the FDE associated with this function. */
1029 cfun->fde = fde;
1030 vec_safe_push (fde_vec, fde);
1031
1032 return fde;
1033 }
1034
1035 /* Output a marker (i.e. a label) for the beginning of a function, before
1036 the prologue. */
1037
1038 void
1039 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1040 unsigned int column ATTRIBUTE_UNUSED,
1041 const char *file ATTRIBUTE_UNUSED)
1042 {
1043 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1044 char * dup_label;
1045 dw_fde_ref fde;
1046 section *fnsec;
1047 bool do_frame;
1048
1049 current_function_func_begin_label = NULL;
1050
1051 do_frame = dwarf2out_do_frame ();
1052
1053 /* ??? current_function_func_begin_label is also used by except.c for
1054 call-site information. We must emit this label if it might be used. */
1055 if (!do_frame
1056 && (!flag_exceptions
1057 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1058 return;
1059
1060 fnsec = function_section (current_function_decl);
1061 switch_to_section (fnsec);
1062 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1063 current_function_funcdef_no);
1064 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1065 current_function_funcdef_no);
1066 dup_label = xstrdup (label);
1067 current_function_func_begin_label = dup_label;
1068
1069 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1070 if (!do_frame)
1071 return;
1072
1073 /* Unlike the debug version, the EH version of frame unwind info is a per-
1074 function setting so we need to record whether we need it for the unit. */
1075 do_eh_frame |= dwarf2out_do_eh_frame ();
1076
1077 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1078 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1079 would include pass_dwarf2_frame. If we've not created the FDE yet,
1080 do so now. */
1081 fde = cfun->fde;
1082 if (fde == NULL)
1083 fde = dwarf2out_alloc_current_fde ();
1084
1085 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1086 fde->dw_fde_begin = dup_label;
1087 fde->dw_fde_current_label = dup_label;
1088 fde->in_std_section = (fnsec == text_section
1089 || (cold_text_section && fnsec == cold_text_section));
1090
1091 /* We only want to output line number information for the genuine dwarf2
1092 prologue case, not the eh frame case. */
1093 #ifdef DWARF2_DEBUGGING_INFO
1094 if (file)
1095 dwarf2out_source_line (line, column, file, 0, true);
1096 #endif
1097
1098 if (dwarf2out_do_cfi_asm ())
1099 dwarf2out_do_cfi_startproc (false);
1100 else
1101 {
1102 rtx personality = get_personality_function (current_function_decl);
1103 if (!current_unit_personality)
1104 current_unit_personality = personality;
1105
1106 /* We cannot keep a current personality per function as without CFI
1107 asm, at the point where we emit the CFI data, there is no current
1108 function anymore. */
1109 if (personality && current_unit_personality != personality)
1110 sorry ("multiple EH personalities are supported only with assemblers "
1111 "supporting .cfi_personality directive");
1112 }
1113 }
1114
1115 /* Output a marker (i.e. a label) for the end of the generated code
1116 for a function prologue. This gets called *after* the prologue code has
1117 been generated. */
1118
1119 void
1120 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1121 const char *file ATTRIBUTE_UNUSED)
1122 {
1123 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1124
1125 /* Output a label to mark the endpoint of the code generated for this
1126 function. */
1127 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1128 current_function_funcdef_no);
1129 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1130 current_function_funcdef_no);
1131 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1132 }
1133
1134 /* Output a marker (i.e. a label) for the beginning of the generated code
1135 for a function epilogue. This gets called *before* the epilogue code has
1136 been generated. */
1137
1138 void
1139 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1140 const char *file ATTRIBUTE_UNUSED)
1141 {
1142 dw_fde_ref fde = cfun->fde;
1143 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1144
1145 if (fde->dw_fde_vms_begin_epilogue)
1146 return;
1147
1148 /* Output a label to mark the endpoint of the code generated for this
1149 function. */
1150 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1151 current_function_funcdef_no);
1152 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1153 current_function_funcdef_no);
1154 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1155 }
1156
1157 /* Output a marker (i.e. a label) for the absolute end of the generated code
1158 for a function definition. This gets called *after* the epilogue code has
1159 been generated. */
1160
1161 void
1162 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1163 const char *file ATTRIBUTE_UNUSED)
1164 {
1165 dw_fde_ref fde;
1166 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1167
1168 last_var_location_insn = NULL;
1169 cached_next_real_insn = NULL;
1170
1171 if (dwarf2out_do_cfi_asm ())
1172 fprintf (asm_out_file, "\t.cfi_endproc\n");
1173
1174 /* Output a label to mark the endpoint of the code generated for this
1175 function. */
1176 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1177 current_function_funcdef_no);
1178 ASM_OUTPUT_LABEL (asm_out_file, label);
1179 fde = cfun->fde;
1180 gcc_assert (fde != NULL);
1181 if (fde->dw_fde_second_begin == NULL)
1182 fde->dw_fde_end = xstrdup (label);
1183 }
1184
1185 void
1186 dwarf2out_frame_finish (void)
1187 {
1188 /* Output call frame information. */
1189 if (targetm.debug_unwind_info () == UI_DWARF2)
1190 output_call_frame_info (0);
1191
1192 /* Output another copy for the unwinder. */
1193 if (do_eh_frame)
1194 output_call_frame_info (1);
1195 }
1196
1197 /* Note that the current function section is being used for code. */
1198
1199 static void
1200 dwarf2out_note_section_used (void)
1201 {
1202 section *sec = current_function_section ();
1203 if (sec == text_section)
1204 text_section_used = true;
1205 else if (sec == cold_text_section)
1206 cold_text_section_used = true;
1207 }
1208
1209 static void var_location_switch_text_section (void);
1210 static void set_cur_line_info_table (section *);
1211
1212 void
1213 dwarf2out_switch_text_section (void)
1214 {
1215 section *sect;
1216 dw_fde_ref fde = cfun->fde;
1217
1218 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1219
1220 if (!in_cold_section_p)
1221 {
1222 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1223 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1224 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1225 }
1226 else
1227 {
1228 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1229 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1230 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1231 }
1232 have_multiple_function_sections = true;
1233
1234 /* There is no need to mark used sections when not debugging. */
1235 if (cold_text_section != NULL)
1236 dwarf2out_note_section_used ();
1237
1238 if (dwarf2out_do_cfi_asm ())
1239 fprintf (asm_out_file, "\t.cfi_endproc\n");
1240
1241 /* Now do the real section switch. */
1242 sect = current_function_section ();
1243 switch_to_section (sect);
1244
1245 fde->second_in_std_section
1246 = (sect == text_section
1247 || (cold_text_section && sect == cold_text_section));
1248
1249 if (dwarf2out_do_cfi_asm ())
1250 dwarf2out_do_cfi_startproc (true);
1251
1252 var_location_switch_text_section ();
1253
1254 if (cold_text_section != NULL)
1255 set_cur_line_info_table (sect);
1256 }
1257 \f
1258 /* And now, the subset of the debugging information support code necessary
1259 for emitting location expressions. */
1260
1261 /* Data about a single source file. */
1262 struct GTY((for_user)) dwarf_file_data {
1263 const char * filename;
1264 int emitted_number;
1265 };
1266
1267 /* Describe an entry into the .debug_addr section. */
1268
1269 enum ate_kind {
1270 ate_kind_rtx,
1271 ate_kind_rtx_dtprel,
1272 ate_kind_label
1273 };
1274
1275 struct GTY((for_user)) addr_table_entry {
1276 enum ate_kind kind;
1277 unsigned int refcount;
1278 unsigned int index;
1279 union addr_table_entry_struct_union
1280 {
1281 rtx GTY ((tag ("0"))) rtl;
1282 char * GTY ((tag ("1"))) label;
1283 }
1284 GTY ((desc ("%1.kind"))) addr;
1285 };
1286
1287 typedef unsigned int var_loc_view;
1288
1289 /* Location lists are ranges + location descriptions for that range,
1290 so you can track variables that are in different places over
1291 their entire life. */
1292 typedef struct GTY(()) dw_loc_list_struct {
1293 dw_loc_list_ref dw_loc_next;
1294 const char *begin; /* Label and addr_entry for start of range */
1295 addr_table_entry *begin_entry;
1296 const char *end; /* Label for end of range */
1297 char *ll_symbol; /* Label for beginning of location list.
1298 Only on head of list. */
1299 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1300 const char *section; /* Section this loclist is relative to */
1301 dw_loc_descr_ref expr;
1302 var_loc_view vbegin, vend;
1303 hashval_t hash;
1304 /* True if all addresses in this and subsequent lists are known to be
1305 resolved. */
1306 bool resolved_addr;
1307 /* True if this list has been replaced by dw_loc_next. */
1308 bool replaced;
1309 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1310 section. */
1311 unsigned char emitted : 1;
1312 /* True if hash field is index rather than hash value. */
1313 unsigned char num_assigned : 1;
1314 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1315 unsigned char offset_emitted : 1;
1316 /* True if note_variable_value_in_expr has been called on it. */
1317 unsigned char noted_variable_value : 1;
1318 /* True if the range should be emitted even if begin and end
1319 are the same. */
1320 bool force;
1321 } dw_loc_list_node;
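/* Editorial example, not part of the original source: a variable that lives
   in a register early in a function and is later spilled to the stack would
   be described by a two-node list, conceptually

       [begin0, end0)  DW_OP_reg3
       [begin1, end1)  DW_OP_fbreg -24

   where begin/end are the label fields above and each node's expr holds the
   location description valid over that range.  */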
1322
1323 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1324 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1325
1326 /* Convert a DWARF stack opcode into its string name. */
1327
1328 static const char *
1329 dwarf_stack_op_name (unsigned int op)
1330 {
1331 const char *name = get_DW_OP_name (op);
1332
1333 if (name != NULL)
1334 return name;
1335
1336 return "OP_<unknown>";
1337 }
1338
1339 /* Return TRUE iff we're to output location view lists as a separate
1340 attribute next to the location lists, as an extension compatible
1341 with DWARF 2 and above. */
1342
1343 static inline bool
1344 dwarf2out_locviews_in_attribute ()
1345 {
1346 return debug_variable_location_views == 1;
1347 }
1348
1349 /* Return TRUE iff we're to output location view lists as part of the
1350 location lists, as proposed for standardization after DWARF 5. */
1351
1352 static inline bool
1353 dwarf2out_locviews_in_loclist ()
1354 {
1355 #ifndef DW_LLE_view_pair
1356 return false;
1357 #else
1358 return debug_variable_location_views == -1;
1359 #endif
1360 }
1361
1362 /* Return a pointer to a newly allocated location description. Location
1363 descriptions are simple expression terms that can be strung
1364 together to form more complicated location (address) descriptions. */
1365
1366 static inline dw_loc_descr_ref
1367 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1368 unsigned HOST_WIDE_INT oprnd2)
1369 {
1370 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1371
1372 descr->dw_loc_opc = op;
1373 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1374 descr->dw_loc_oprnd1.val_entry = NULL;
1375 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1376 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1377 descr->dw_loc_oprnd2.val_entry = NULL;
1378 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1379
1380 return descr;
1381 }
1382
1383 /* Add a location description term to a location description expression. */
1384
1385 static inline void
1386 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1387 {
1388 dw_loc_descr_ref *d;
1389
1390 /* Find the end of the chain. */
1391 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1392 ;
1393
1394 *d = descr;
1395 }
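/* Editorial usage sketch, not part of the original source: a location
   expression meaning "the value stored at r2 + 16" could be built as

       dw_loc_descr_ref head = new_loc_descr (DW_OP_breg2, 16, 0);
       add_loc_descr (&head, new_loc_descr (DW_OP_deref, 0, 0));

   i.e. push the address r2 + 16, then dereference it.  */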
1396
1397 /* Compare two location operands for exact equality. */
1398
1399 static bool
1400 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1401 {
1402 if (a->val_class != b->val_class)
1403 return false;
1404 switch (a->val_class)
1405 {
1406 case dw_val_class_none:
1407 return true;
1408 case dw_val_class_addr:
1409 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1410
1411 case dw_val_class_offset:
1412 case dw_val_class_unsigned_const:
1413 case dw_val_class_const:
1414 case dw_val_class_unsigned_const_implicit:
1415 case dw_val_class_const_implicit:
1416 case dw_val_class_range_list:
1417 /* These are all HOST_WIDE_INT, signed or unsigned. */
1418 return a->v.val_unsigned == b->v.val_unsigned;
1419
1420 case dw_val_class_loc:
1421 return a->v.val_loc == b->v.val_loc;
1422 case dw_val_class_loc_list:
1423 return a->v.val_loc_list == b->v.val_loc_list;
1424 case dw_val_class_view_list:
1425 return a->v.val_view_list == b->v.val_view_list;
1426 case dw_val_class_die_ref:
1427 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1428 case dw_val_class_fde_ref:
1429 return a->v.val_fde_index == b->v.val_fde_index;
1430 case dw_val_class_symview:
1431 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1432 case dw_val_class_lbl_id:
1433 case dw_val_class_lineptr:
1434 case dw_val_class_macptr:
1435 case dw_val_class_loclistsptr:
1436 case dw_val_class_high_pc:
1437 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1438 case dw_val_class_str:
1439 return a->v.val_str == b->v.val_str;
1440 case dw_val_class_flag:
1441 return a->v.val_flag == b->v.val_flag;
1442 case dw_val_class_file:
1443 case dw_val_class_file_implicit:
1444 return a->v.val_file == b->v.val_file;
1445 case dw_val_class_decl_ref:
1446 return a->v.val_decl_ref == b->v.val_decl_ref;
1447
1448 case dw_val_class_const_double:
1449 return (a->v.val_double.high == b->v.val_double.high
1450 && a->v.val_double.low == b->v.val_double.low);
1451
1452 case dw_val_class_wide_int:
1453 return *a->v.val_wide == *b->v.val_wide;
1454
1455 case dw_val_class_vec:
1456 {
1457 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1458 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1459
1460 return (a_len == b_len
1461 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1462 }
1463
1464 case dw_val_class_data8:
1465 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1466
1467 case dw_val_class_vms_delta:
1468 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1469 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1470
1471 case dw_val_class_discr_value:
1472 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1473 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1474 case dw_val_class_discr_list:
1475 /* It makes no sense comparing two discriminant value lists. */
1476 return false;
1477 }
1478 gcc_unreachable ();
1479 }
1480
1481 /* Compare two location atoms for exact equality. */
1482
1483 static bool
1484 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1485 {
1486 if (a->dw_loc_opc != b->dw_loc_opc)
1487 return false;
1488
1489 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1490 address size, but since we always allocate cleared storage it
1491 should be zero for other types of locations. */
1492 if (a->dtprel != b->dtprel)
1493 return false;
1494
1495 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1496 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1497 }
1498
1499 /* Compare two complete location expressions for exact equality. */
1500
1501 bool
1502 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1503 {
1504 while (1)
1505 {
1506 if (a == b)
1507 return true;
1508 if (a == NULL || b == NULL)
1509 return false;
1510 if (!loc_descr_equal_p_1 (a, b))
1511 return false;
1512
1513 a = a->dw_loc_next;
1514 b = b->dw_loc_next;
1515 }
1516 }
1517
1518
1519 /* Add a constant POLY_OFFSET to a location expression. */
1520
1521 static void
1522 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1523 {
1524 dw_loc_descr_ref loc;
1525 HOST_WIDE_INT *p;
1526
1527 gcc_assert (*list_head != NULL);
1528
1529 if (known_eq (poly_offset, 0))
1530 return;
1531
1532 /* Find the end of the chain. */
1533 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1534 ;
1535
1536 HOST_WIDE_INT offset;
1537 if (!poly_offset.is_constant (&offset))
1538 {
1539 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1540 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1541 return;
1542 }
1543
1544 p = NULL;
1545 if (loc->dw_loc_opc == DW_OP_fbreg
1546 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1547 p = &loc->dw_loc_oprnd1.v.val_int;
1548 else if (loc->dw_loc_opc == DW_OP_bregx)
1549 p = &loc->dw_loc_oprnd2.v.val_int;
1550
1551 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1552 offset. Don't optimize if a signed integer overflow would happen. */
1553 if (p != NULL
1554 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1555 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1556 *p += offset;
1557
1558 else if (offset > 0)
1559 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1560
1561 else
1562 {
1563 loc->dw_loc_next
1564 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1565 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1566 }
1567 }
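/* Editorial example, not part of the original source: if the expression
   currently ends in DW_OP_fbreg -24, loc_descr_plus_const (&l, 8) simply
   rewrites that operand to -16; if it ends in some other op, a positive
   offset is appended as DW_OP_plus_uconst and a negative one as a constant
   followed by DW_OP_minus.  */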
1568
1569 /* Return a pointer to a newly allocated location description for
1570 REG and OFFSET. */
1571
1572 static inline dw_loc_descr_ref
1573 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1574 {
1575 HOST_WIDE_INT const_offset;
1576 if (offset.is_constant (&const_offset))
1577 {
1578 if (reg <= 31)
1579 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1580 const_offset, 0);
1581 else
1582 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1583 }
1584 else
1585 {
1586 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1587 loc_descr_plus_const (&ret, offset);
1588 return ret;
1589 }
1590 }
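/* Editorial example, not part of the original source: new_reg_loc_descr (3, 8)
   yields the compact one-byte-opcode form DW_OP_breg3 8, whereas a register
   number above 31, e.g. new_reg_loc_descr (40, 8), needs the two-operand
   form DW_OP_bregx 40, 8.  */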
1591
1592 /* Add a constant OFFSET to a location list. */
1593
1594 static void
1595 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1596 {
1597 dw_loc_list_ref d;
1598 for (d = list_head; d != NULL; d = d->dw_loc_next)
1599 loc_descr_plus_const (&d->expr, offset);
1600 }
1601
1602 #define DWARF_REF_SIZE \
1603 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1604
1605 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1606 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1607 DW_FORM_data16 with 128 bits. */
1608 #define DWARF_LARGEST_DATA_FORM_BITS \
1609 (dwarf_version >= 5 ? 128 : 64)
1610
1611 /* Utility inline function for construction of ops that were GNU extensions
1612 before DWARF 5. */
1613 static inline enum dwarf_location_atom
1614 dwarf_OP (enum dwarf_location_atom op)
1615 {
1616 switch (op)
1617 {
1618 case DW_OP_implicit_pointer:
1619 if (dwarf_version < 5)
1620 return DW_OP_GNU_implicit_pointer;
1621 break;
1622
1623 case DW_OP_entry_value:
1624 if (dwarf_version < 5)
1625 return DW_OP_GNU_entry_value;
1626 break;
1627
1628 case DW_OP_const_type:
1629 if (dwarf_version < 5)
1630 return DW_OP_GNU_const_type;
1631 break;
1632
1633 case DW_OP_regval_type:
1634 if (dwarf_version < 5)
1635 return DW_OP_GNU_regval_type;
1636 break;
1637
1638 case DW_OP_deref_type:
1639 if (dwarf_version < 5)
1640 return DW_OP_GNU_deref_type;
1641 break;
1642
1643 case DW_OP_convert:
1644 if (dwarf_version < 5)
1645 return DW_OP_GNU_convert;
1646 break;
1647
1648 case DW_OP_reinterpret:
1649 if (dwarf_version < 5)
1650 return DW_OP_GNU_reinterpret;
1651 break;
1652
1653 case DW_OP_addrx:
1654 if (dwarf_version < 5)
1655 return DW_OP_GNU_addr_index;
1656 break;
1657
1658 case DW_OP_constx:
1659 if (dwarf_version < 5)
1660 return DW_OP_GNU_const_index;
1661 break;
1662
1663 default:
1664 break;
1665 }
1666 return op;
1667 }
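/* Editorial example, not part of the original source: dwarf_OP
   (DW_OP_entry_value) yields DW_OP_GNU_entry_value when emitting DWARF 2-4
   and DW_OP_entry_value itself for DWARF 5 or later; dwarf_AT, dwarf_TAG and
   dwarf_FORM below follow the same pattern for attributes, tags and forms.  */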
1668
1669 /* Similarly for attributes. */
1670 static inline enum dwarf_attribute
1671 dwarf_AT (enum dwarf_attribute at)
1672 {
1673 switch (at)
1674 {
1675 case DW_AT_call_return_pc:
1676 if (dwarf_version < 5)
1677 return DW_AT_low_pc;
1678 break;
1679
1680 case DW_AT_call_tail_call:
1681 if (dwarf_version < 5)
1682 return DW_AT_GNU_tail_call;
1683 break;
1684
1685 case DW_AT_call_origin:
1686 if (dwarf_version < 5)
1687 return DW_AT_abstract_origin;
1688 break;
1689
1690 case DW_AT_call_target:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_call_site_target;
1693 break;
1694
1695 case DW_AT_call_target_clobbered:
1696 if (dwarf_version < 5)
1697 return DW_AT_GNU_call_site_target_clobbered;
1698 break;
1699
1700 case DW_AT_call_parameter:
1701 if (dwarf_version < 5)
1702 return DW_AT_abstract_origin;
1703 break;
1704
1705 case DW_AT_call_value:
1706 if (dwarf_version < 5)
1707 return DW_AT_GNU_call_site_value;
1708 break;
1709
1710 case DW_AT_call_data_value:
1711 if (dwarf_version < 5)
1712 return DW_AT_GNU_call_site_data_value;
1713 break;
1714
1715 case DW_AT_call_all_calls:
1716 if (dwarf_version < 5)
1717 return DW_AT_GNU_all_call_sites;
1718 break;
1719
1720 case DW_AT_call_all_tail_calls:
1721 if (dwarf_version < 5)
1722 return DW_AT_GNU_all_tail_call_sites;
1723 break;
1724
1725 case DW_AT_dwo_name:
1726 if (dwarf_version < 5)
1727 return DW_AT_GNU_dwo_name;
1728 break;
1729
1730 case DW_AT_addr_base:
1731 if (dwarf_version < 5)
1732 return DW_AT_GNU_addr_base;
1733 break;
1734
1735 default:
1736 break;
1737 }
1738 return at;
1739 }
1740
1741 /* And similarly for tags. */
1742 static inline enum dwarf_tag
1743 dwarf_TAG (enum dwarf_tag tag)
1744 {
1745 switch (tag)
1746 {
1747 case DW_TAG_call_site:
1748 if (dwarf_version < 5)
1749 return DW_TAG_GNU_call_site;
1750 break;
1751
1752 case DW_TAG_call_site_parameter:
1753 if (dwarf_version < 5)
1754 return DW_TAG_GNU_call_site_parameter;
1755 break;
1756
1757 default:
1758 break;
1759 }
1760 return tag;
1761 }
1762
1763 /* And similarly for forms. */
1764 static inline enum dwarf_form
1765 dwarf_FORM (enum dwarf_form form)
1766 {
1767 switch (form)
1768 {
1769 case DW_FORM_addrx:
1770 if (dwarf_version < 5)
1771 return DW_FORM_GNU_addr_index;
1772 break;
1773
1774 case DW_FORM_strx:
1775 if (dwarf_version < 5)
1776 return DW_FORM_GNU_str_index;
1777 break;
1778
1779 default:
1780 break;
1781 }
1782 return form;
1783 }
1784
1785 static unsigned long int get_base_type_offset (dw_die_ref);
1786
1787 /* Return the size of a location descriptor. */
1788
1789 static unsigned long
1790 size_of_loc_descr (dw_loc_descr_ref loc)
1791 {
1792 unsigned long size = 1;
1793
1794 switch (loc->dw_loc_opc)
1795 {
1796 case DW_OP_addr:
1797 size += DWARF2_ADDR_SIZE;
1798 break;
1799 case DW_OP_GNU_addr_index:
1800 case DW_OP_addrx:
1801 case DW_OP_GNU_const_index:
1802 case DW_OP_constx:
1803 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1804 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1805 break;
1806 case DW_OP_const1u:
1807 case DW_OP_const1s:
1808 size += 1;
1809 break;
1810 case DW_OP_const2u:
1811 case DW_OP_const2s:
1812 size += 2;
1813 break;
1814 case DW_OP_const4u:
1815 case DW_OP_const4s:
1816 size += 4;
1817 break;
1818 case DW_OP_const8u:
1819 case DW_OP_const8s:
1820 size += 8;
1821 break;
1822 case DW_OP_constu:
1823 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1824 break;
1825 case DW_OP_consts:
1826 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1827 break;
1828 case DW_OP_pick:
1829 size += 1;
1830 break;
1831 case DW_OP_plus_uconst:
1832 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1833 break;
1834 case DW_OP_skip:
1835 case DW_OP_bra:
1836 size += 2;
1837 break;
1838 case DW_OP_breg0:
1839 case DW_OP_breg1:
1840 case DW_OP_breg2:
1841 case DW_OP_breg3:
1842 case DW_OP_breg4:
1843 case DW_OP_breg5:
1844 case DW_OP_breg6:
1845 case DW_OP_breg7:
1846 case DW_OP_breg8:
1847 case DW_OP_breg9:
1848 case DW_OP_breg10:
1849 case DW_OP_breg11:
1850 case DW_OP_breg12:
1851 case DW_OP_breg13:
1852 case DW_OP_breg14:
1853 case DW_OP_breg15:
1854 case DW_OP_breg16:
1855 case DW_OP_breg17:
1856 case DW_OP_breg18:
1857 case DW_OP_breg19:
1858 case DW_OP_breg20:
1859 case DW_OP_breg21:
1860 case DW_OP_breg22:
1861 case DW_OP_breg23:
1862 case DW_OP_breg24:
1863 case DW_OP_breg25:
1864 case DW_OP_breg26:
1865 case DW_OP_breg27:
1866 case DW_OP_breg28:
1867 case DW_OP_breg29:
1868 case DW_OP_breg30:
1869 case DW_OP_breg31:
1870 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1871 break;
1872 case DW_OP_regx:
1873 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1874 break;
1875 case DW_OP_fbreg:
1876 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1877 break;
1878 case DW_OP_bregx:
1879 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1880 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1881 break;
1882 case DW_OP_piece:
1883 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1884 break;
1885 case DW_OP_bit_piece:
1886 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1887 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1888 break;
1889 case DW_OP_deref_size:
1890 case DW_OP_xderef_size:
1891 size += 1;
1892 break;
1893 case DW_OP_call2:
1894 size += 2;
1895 break;
1896 case DW_OP_call4:
1897 size += 4;
1898 break;
1899 case DW_OP_call_ref:
1900 case DW_OP_GNU_variable_value:
1901 size += DWARF_REF_SIZE;
1902 break;
1903 case DW_OP_implicit_value:
1904 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1905 + loc->dw_loc_oprnd1.v.val_unsigned;
1906 break;
1907 case DW_OP_implicit_pointer:
1908 case DW_OP_GNU_implicit_pointer:
1909 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1910 break;
1911 case DW_OP_entry_value:
1912 case DW_OP_GNU_entry_value:
1913 {
1914 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1915 size += size_of_uleb128 (op_size) + op_size;
1916 break;
1917 }
1918 case DW_OP_const_type:
1919 case DW_OP_GNU_const_type:
1920 {
1921 unsigned long o
1922 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1923 size += size_of_uleb128 (o) + 1;
1924 switch (loc->dw_loc_oprnd2.val_class)
1925 {
1926 case dw_val_class_vec:
1927 size += loc->dw_loc_oprnd2.v.val_vec.length
1928 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1929 break;
1930 case dw_val_class_const:
1931 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1932 break;
1933 case dw_val_class_const_double:
1934 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1935 break;
1936 case dw_val_class_wide_int:
1937 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1938 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1939 break;
1940 default:
1941 gcc_unreachable ();
1942 }
1943 break;
1944 }
1945 case DW_OP_regval_type:
1946 case DW_OP_GNU_regval_type:
1947 {
1948 unsigned long o
1949 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1950 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1951 + size_of_uleb128 (o);
1952 }
1953 break;
1954 case DW_OP_deref_type:
1955 case DW_OP_GNU_deref_type:
1956 {
1957 unsigned long o
1958 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1959 size += 1 + size_of_uleb128 (o);
1960 }
1961 break;
1962 case DW_OP_convert:
1963 case DW_OP_reinterpret:
1964 case DW_OP_GNU_convert:
1965 case DW_OP_GNU_reinterpret:
1966 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1967 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1968 else
1969 {
1970 unsigned long o
1971 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1972 size += size_of_uleb128 (o);
1973 }
1974 break;
1975 case DW_OP_GNU_parameter_ref:
1976 size += 4;
1977 break;
1978 default:
1979 break;
1980 }
1981
1982 return size;
1983 }
1984
1985 /* Return the size of a series of location descriptors. */
1986
1987 unsigned long
1988 size_of_locs (dw_loc_descr_ref loc)
1989 {
1990 dw_loc_descr_ref l;
1991 unsigned long size;
1992
1993 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1994 field, to avoid writing to a PCH file. */
1995 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1996 {
1997 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1998 break;
1999 size += size_of_loc_descr (l);
2000 }
2001 if (! l)
2002 return size;
2003
2004 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2005 {
2006 l->dw_loc_addr = size;
2007 size += size_of_loc_descr (l);
2008 }
2009
2010 return size;
2011 }
2012
2013 /* Return the size of the value in a DW_AT_discr_value attribute. */
2014
2015 static int
2016 size_of_discr_value (dw_discr_value *discr_value)
2017 {
2018 if (discr_value->pos)
2019 return size_of_uleb128 (discr_value->v.uval);
2020 else
2021 return size_of_sleb128 (discr_value->v.sval);
2022 }
2023
2024 /* Return the size of the value in a DW_AT_discr_list attribute. */
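/* As a worked example, a list holding one single-value label and one range
   label, all of whose values fit in a single LEB128 byte, takes 2 + 3 = 5
   bytes: every entry costs one descriptor byte plus one LEB128 number, and
   range entries pay for a second LEB128 number (the upper bound).  */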
2025
2026 static int
2027 size_of_discr_list (dw_discr_list_ref discr_list)
2028 {
2029 int size = 0;
2030
2031 for (dw_discr_list_ref list = discr_list;
2032 list != NULL;
2033 list = list->dw_discr_next)
2034 {
2035 /* One byte for the discriminant value descriptor, and then one or two
2036 LEB128 numbers, depending on whether it's a single case label or a
2037 range label. */
2038 size += 1;
2039 size += size_of_discr_value (&list->dw_discr_lower_bound);
2040 if (list->dw_discr_range != 0)
2041 size += size_of_discr_value (&list->dw_discr_upper_bound);
2042 }
2043 return size;
2044 }
2045
2046 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2047 static void get_ref_die_offset_label (char *, dw_die_ref);
2048 static unsigned long int get_ref_die_offset (dw_die_ref);
2049
2050 /* Output location description stack opcode's operands (if any).
2051 The for_eh_or_skip parameter controls whether register numbers are
2052 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2053 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2054 info). This should be suppressed for the cases that have not been converted
2055 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2056
2057 static void
2058 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2059 {
2060 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2061 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2062
2063 switch (loc->dw_loc_opc)
2064 {
2065 #ifdef DWARF2_DEBUGGING_INFO
2066 case DW_OP_const2u:
2067 case DW_OP_const2s:
2068 dw2_asm_output_data (2, val1->v.val_int, NULL);
2069 break;
2070 case DW_OP_const4u:
2071 if (loc->dtprel)
2072 {
2073 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2074 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2075 val1->v.val_addr);
2076 fputc ('\n', asm_out_file);
2077 break;
2078 }
2079 /* FALLTHRU */
2080 case DW_OP_const4s:
2081 dw2_asm_output_data (4, val1->v.val_int, NULL);
2082 break;
2083 case DW_OP_const8u:
2084 if (loc->dtprel)
2085 {
2086 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2087 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2088 val1->v.val_addr);
2089 fputc ('\n', asm_out_file);
2090 break;
2091 }
2092 /* FALLTHRU */
2093 case DW_OP_const8s:
2094 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2095 dw2_asm_output_data (8, val1->v.val_int, NULL);
2096 break;
2097 case DW_OP_skip:
2098 case DW_OP_bra:
2099 {
2100 int offset;
2101
2102 gcc_assert (val1->val_class == dw_val_class_loc);
2103 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2104
2105 dw2_asm_output_data (2, offset, NULL);
2106 }
2107 break;
2108 case DW_OP_implicit_value:
2109 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2110 switch (val2->val_class)
2111 {
2112 case dw_val_class_const:
2113 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2114 break;
2115 case dw_val_class_vec:
2116 {
2117 unsigned int elt_size = val2->v.val_vec.elt_size;
2118 unsigned int len = val2->v.val_vec.length;
2119 unsigned int i;
2120 unsigned char *p;
2121
2122 if (elt_size > sizeof (HOST_WIDE_INT))
2123 {
2124 elt_size /= 2;
2125 len *= 2;
2126 }
2127 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2128 i < len;
2129 i++, p += elt_size)
2130 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2131 "fp or vector constant word %u", i);
2132 }
2133 break;
2134 case dw_val_class_const_double:
2135 {
2136 unsigned HOST_WIDE_INT first, second;
2137
2138 if (WORDS_BIG_ENDIAN)
2139 {
2140 first = val2->v.val_double.high;
2141 second = val2->v.val_double.low;
2142 }
2143 else
2144 {
2145 first = val2->v.val_double.low;
2146 second = val2->v.val_double.high;
2147 }
2148 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2149 first, NULL);
2150 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2151 second, NULL);
2152 }
2153 break;
2154 case dw_val_class_wide_int:
2155 {
2156 int i;
2157 int len = get_full_len (*val2->v.val_wide);
2158 if (WORDS_BIG_ENDIAN)
2159 for (i = len - 1; i >= 0; --i)
2160 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2161 val2->v.val_wide->elt (i), NULL);
2162 else
2163 for (i = 0; i < len; ++i)
2164 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2165 val2->v.val_wide->elt (i), NULL);
2166 }
2167 break;
2168 case dw_val_class_addr:
2169 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2170 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2171 break;
2172 default:
2173 gcc_unreachable ();
2174 }
2175 break;
2176 #else
2177 case DW_OP_const2u:
2178 case DW_OP_const2s:
2179 case DW_OP_const4u:
2180 case DW_OP_const4s:
2181 case DW_OP_const8u:
2182 case DW_OP_const8s:
2183 case DW_OP_skip:
2184 case DW_OP_bra:
2185 case DW_OP_implicit_value:
2186 /* We currently make no attempt to ensure these are properly
2187 aligned the way we do for the main unwind info, so we don't
2188 support emitting anything larger than a byte if we're only
2189 doing unwinding. */
2190 gcc_unreachable ();
2191 #endif
2192 case DW_OP_const1u:
2193 case DW_OP_const1s:
2194 dw2_asm_output_data (1, val1->v.val_int, NULL);
2195 break;
2196 case DW_OP_constu:
2197 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2198 break;
2199 case DW_OP_consts:
2200 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2201 break;
2202 case DW_OP_pick:
2203 dw2_asm_output_data (1, val1->v.val_int, NULL);
2204 break;
2205 case DW_OP_plus_uconst:
2206 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2207 break;
2208 case DW_OP_breg0:
2209 case DW_OP_breg1:
2210 case DW_OP_breg2:
2211 case DW_OP_breg3:
2212 case DW_OP_breg4:
2213 case DW_OP_breg5:
2214 case DW_OP_breg6:
2215 case DW_OP_breg7:
2216 case DW_OP_breg8:
2217 case DW_OP_breg9:
2218 case DW_OP_breg10:
2219 case DW_OP_breg11:
2220 case DW_OP_breg12:
2221 case DW_OP_breg13:
2222 case DW_OP_breg14:
2223 case DW_OP_breg15:
2224 case DW_OP_breg16:
2225 case DW_OP_breg17:
2226 case DW_OP_breg18:
2227 case DW_OP_breg19:
2228 case DW_OP_breg20:
2229 case DW_OP_breg21:
2230 case DW_OP_breg22:
2231 case DW_OP_breg23:
2232 case DW_OP_breg24:
2233 case DW_OP_breg25:
2234 case DW_OP_breg26:
2235 case DW_OP_breg27:
2236 case DW_OP_breg28:
2237 case DW_OP_breg29:
2238 case DW_OP_breg30:
2239 case DW_OP_breg31:
2240 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2241 break;
2242 case DW_OP_regx:
2243 {
2244 unsigned r = val1->v.val_unsigned;
2245 if (for_eh_or_skip >= 0)
2246 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2247 gcc_assert (size_of_uleb128 (r)
2248 == size_of_uleb128 (val1->v.val_unsigned));
2249 dw2_asm_output_data_uleb128 (r, NULL);
2250 }
2251 break;
2252 case DW_OP_fbreg:
2253 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2254 break;
2255 case DW_OP_bregx:
2256 {
2257 unsigned r = val1->v.val_unsigned;
2258 if (for_eh_or_skip >= 0)
2259 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2260 gcc_assert (size_of_uleb128 (r)
2261 == size_of_uleb128 (val1->v.val_unsigned));
2262 dw2_asm_output_data_uleb128 (r, NULL);
2263 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2264 }
2265 break;
2266 case DW_OP_piece:
2267 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2268 break;
2269 case DW_OP_bit_piece:
2270 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2271 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2272 break;
2273 case DW_OP_deref_size:
2274 case DW_OP_xderef_size:
2275 dw2_asm_output_data (1, val1->v.val_int, NULL);
2276 break;
2277
2278 case DW_OP_addr:
2279 if (loc->dtprel)
2280 {
2281 if (targetm.asm_out.output_dwarf_dtprel)
2282 {
2283 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2284 DWARF2_ADDR_SIZE,
2285 val1->v.val_addr);
2286 fputc ('\n', asm_out_file);
2287 }
2288 else
2289 gcc_unreachable ();
2290 }
2291 else
2292 {
2293 #ifdef DWARF2_DEBUGGING_INFO
2294 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2295 #else
2296 gcc_unreachable ();
2297 #endif
2298 }
2299 break;
2300
2301 case DW_OP_GNU_addr_index:
2302 case DW_OP_addrx:
2303 case DW_OP_GNU_const_index:
2304 case DW_OP_constx:
2305 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2306 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2307 "(index into .debug_addr)");
2308 break;
2309
2310 case DW_OP_call2:
2311 case DW_OP_call4:
2312 {
2313 unsigned long die_offset
2314 = get_ref_die_offset (val1->v.val_die_ref.die);
2315 /* Make sure the offset has been computed and that we can encode it as
2316 an operand. */
2317 gcc_assert (die_offset > 0
2318 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2319 ? 0xffff
2320 : 0xffffffff));
2321 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2322 die_offset, NULL);
2323 }
2324 break;
2325
2326 case DW_OP_call_ref:
2327 case DW_OP_GNU_variable_value:
2328 {
2329 char label[MAX_ARTIFICIAL_LABEL_BYTES
2330 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2331 gcc_assert (val1->val_class == dw_val_class_die_ref);
2332 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2333 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2334 }
2335 break;
2336
2337 case DW_OP_implicit_pointer:
2338 case DW_OP_GNU_implicit_pointer:
2339 {
2340 char label[MAX_ARTIFICIAL_LABEL_BYTES
2341 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2342 gcc_assert (val1->val_class == dw_val_class_die_ref);
2343 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2344 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2345 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2346 }
2347 break;
2348
2349 case DW_OP_entry_value:
2350 case DW_OP_GNU_entry_value:
2351 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2352 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2353 break;
2354
2355 case DW_OP_const_type:
2356 case DW_OP_GNU_const_type:
2357 {
2358 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2359 gcc_assert (o);
2360 dw2_asm_output_data_uleb128 (o, NULL);
2361 switch (val2->val_class)
2362 {
2363 case dw_val_class_const:
2364 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2365 dw2_asm_output_data (1, l, NULL);
2366 dw2_asm_output_data (l, val2->v.val_int, NULL);
2367 break;
2368 case dw_val_class_vec:
2369 {
2370 unsigned int elt_size = val2->v.val_vec.elt_size;
2371 unsigned int len = val2->v.val_vec.length;
2372 unsigned int i;
2373 unsigned char *p;
2374
2375 l = len * elt_size;
2376 dw2_asm_output_data (1, l, NULL);
2377 if (elt_size > sizeof (HOST_WIDE_INT))
2378 {
2379 elt_size /= 2;
2380 len *= 2;
2381 }
2382 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2383 i < len;
2384 i++, p += elt_size)
2385 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2386 "fp or vector constant word %u", i);
2387 }
2388 break;
2389 case dw_val_class_const_double:
2390 {
2391 unsigned HOST_WIDE_INT first, second;
2392 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2393
2394 dw2_asm_output_data (1, 2 * l, NULL);
2395 if (WORDS_BIG_ENDIAN)
2396 {
2397 first = val2->v.val_double.high;
2398 second = val2->v.val_double.low;
2399 }
2400 else
2401 {
2402 first = val2->v.val_double.low;
2403 second = val2->v.val_double.high;
2404 }
2405 dw2_asm_output_data (l, first, NULL);
2406 dw2_asm_output_data (l, second, NULL);
2407 }
2408 break;
2409 case dw_val_class_wide_int:
2410 {
2411 int i;
2412 int len = get_full_len (*val2->v.val_wide);
2413 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2414
2415 dw2_asm_output_data (1, len * l, NULL);
2416 if (WORDS_BIG_ENDIAN)
2417 for (i = len - 1; i >= 0; --i)
2418 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2419 else
2420 for (i = 0; i < len; ++i)
2421 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2422 }
2423 break;
2424 default:
2425 gcc_unreachable ();
2426 }
2427 }
2428 break;
2429 case DW_OP_regval_type:
2430 case DW_OP_GNU_regval_type:
2431 {
2432 unsigned r = val1->v.val_unsigned;
2433 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2434 gcc_assert (o);
2435 if (for_eh_or_skip >= 0)
2436 {
2437 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2438 gcc_assert (size_of_uleb128 (r)
2439 == size_of_uleb128 (val1->v.val_unsigned));
2440 }
2441 dw2_asm_output_data_uleb128 (r, NULL);
2442 dw2_asm_output_data_uleb128 (o, NULL);
2443 }
2444 break;
2445 case DW_OP_deref_type:
2446 case DW_OP_GNU_deref_type:
2447 {
2448 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2449 gcc_assert (o);
2450 dw2_asm_output_data (1, val1->v.val_int, NULL);
2451 dw2_asm_output_data_uleb128 (o, NULL);
2452 }
2453 break;
2454 case DW_OP_convert:
2455 case DW_OP_reinterpret:
2456 case DW_OP_GNU_convert:
2457 case DW_OP_GNU_reinterpret:
2458 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2459 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2460 else
2461 {
2462 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2463 gcc_assert (o);
2464 dw2_asm_output_data_uleb128 (o, NULL);
2465 }
2466 break;
2467
2468 case DW_OP_GNU_parameter_ref:
2469 {
2470 unsigned long o;
2471 gcc_assert (val1->val_class == dw_val_class_die_ref);
2472 o = get_ref_die_offset (val1->v.val_die_ref.die);
2473 dw2_asm_output_data (4, o, NULL);
2474 }
2475 break;
2476
2477 default:
2478 /* Other codes have no operands. */
2479 break;
2480 }
2481 }
2482
2483 /* Output a sequence of location operations.
2484 The for_eh_or_skip parameter controls whether register numbers are
2485 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2486 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2487 info). This should be suppressed for the cases that have not been converted
2488 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2489
2490 void
2491 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2492 {
2493 for (; loc != NULL; loc = loc->dw_loc_next)
2494 {
2495 enum dwarf_location_atom opc = loc->dw_loc_opc;
2496 /* Output the opcode. */
2497 if (for_eh_or_skip >= 0
2498 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2499 {
2500 unsigned r = (opc - DW_OP_breg0);
2501 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2502 gcc_assert (r <= 31);
2503 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2504 }
2505 else if (for_eh_or_skip >= 0
2506 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2507 {
2508 unsigned r = (opc - DW_OP_reg0);
2509 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2510 gcc_assert (r <= 31);
2511 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2512 }
2513
2514 dw2_asm_output_data (1, opc,
2515 "%s", dwarf_stack_op_name (opc));
2516
2517 /* Output the operand(s) (if any). */
2518 output_loc_operands (loc, for_eh_or_skip);
2519 }
2520 }
2521
2522 /* Output location description stack opcode's operands (if any).
2523 The output is single bytes on a line, suitable for .cfi_escape. */
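/* For example, a DW_OP_fbreg -8 descriptor ends up as the opcode byte 0x91
   followed by 0x78, the signed LEB128 encoding of -8; the opcode itself is
   printed by output_loc_sequence_raw below, while this function appends the
   comma-separated operand bytes, so the whole sequence is usable as a
   .cfi_escape operand.  */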
2524
2525 static void
2526 output_loc_operands_raw (dw_loc_descr_ref loc)
2527 {
2528 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2529 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2530
2531 switch (loc->dw_loc_opc)
2532 {
2533 case DW_OP_addr:
2534 case DW_OP_GNU_addr_index:
2535 case DW_OP_addrx:
2536 case DW_OP_GNU_const_index:
2537 case DW_OP_constx:
2538 case DW_OP_implicit_value:
2539 /* We cannot output addresses in .cfi_escape, only bytes. */
2540 gcc_unreachable ();
2541
2542 case DW_OP_const1u:
2543 case DW_OP_const1s:
2544 case DW_OP_pick:
2545 case DW_OP_deref_size:
2546 case DW_OP_xderef_size:
2547 fputc (',', asm_out_file);
2548 dw2_asm_output_data_raw (1, val1->v.val_int);
2549 break;
2550
2551 case DW_OP_const2u:
2552 case DW_OP_const2s:
2553 fputc (',', asm_out_file);
2554 dw2_asm_output_data_raw (2, val1->v.val_int);
2555 break;
2556
2557 case DW_OP_const4u:
2558 case DW_OP_const4s:
2559 fputc (',', asm_out_file);
2560 dw2_asm_output_data_raw (4, val1->v.val_int);
2561 break;
2562
2563 case DW_OP_const8u:
2564 case DW_OP_const8s:
2565 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2566 fputc (',', asm_out_file);
2567 dw2_asm_output_data_raw (8, val1->v.val_int);
2568 break;
2569
2570 case DW_OP_skip:
2571 case DW_OP_bra:
2572 {
2573 int offset;
2574
2575 gcc_assert (val1->val_class == dw_val_class_loc);
2576 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2577
2578 fputc (',', asm_out_file);
2579 dw2_asm_output_data_raw (2, offset);
2580 }
2581 break;
2582
2583 case DW_OP_regx:
2584 {
2585 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2586 gcc_assert (size_of_uleb128 (r)
2587 == size_of_uleb128 (val1->v.val_unsigned));
2588 fputc (',', asm_out_file);
2589 dw2_asm_output_data_uleb128_raw (r);
2590 }
2591 break;
2592
2593 case DW_OP_constu:
2594 case DW_OP_plus_uconst:
2595 case DW_OP_piece:
2596 fputc (',', asm_out_file);
2597 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2598 break;
2599
2600 case DW_OP_bit_piece:
2601 fputc (',', asm_out_file);
2602 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2603 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2604 break;
2605
2606 case DW_OP_consts:
2607 case DW_OP_breg0:
2608 case DW_OP_breg1:
2609 case DW_OP_breg2:
2610 case DW_OP_breg3:
2611 case DW_OP_breg4:
2612 case DW_OP_breg5:
2613 case DW_OP_breg6:
2614 case DW_OP_breg7:
2615 case DW_OP_breg8:
2616 case DW_OP_breg9:
2617 case DW_OP_breg10:
2618 case DW_OP_breg11:
2619 case DW_OP_breg12:
2620 case DW_OP_breg13:
2621 case DW_OP_breg14:
2622 case DW_OP_breg15:
2623 case DW_OP_breg16:
2624 case DW_OP_breg17:
2625 case DW_OP_breg18:
2626 case DW_OP_breg19:
2627 case DW_OP_breg20:
2628 case DW_OP_breg21:
2629 case DW_OP_breg22:
2630 case DW_OP_breg23:
2631 case DW_OP_breg24:
2632 case DW_OP_breg25:
2633 case DW_OP_breg26:
2634 case DW_OP_breg27:
2635 case DW_OP_breg28:
2636 case DW_OP_breg29:
2637 case DW_OP_breg30:
2638 case DW_OP_breg31:
2639 case DW_OP_fbreg:
2640 fputc (',', asm_out_file);
2641 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2642 break;
2643
2644 case DW_OP_bregx:
2645 {
2646 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2647 gcc_assert (size_of_uleb128 (r)
2648 == size_of_uleb128 (val1->v.val_unsigned));
2649 fputc (',', asm_out_file);
2650 dw2_asm_output_data_uleb128_raw (r);
2651 fputc (',', asm_out_file);
2652 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2653 }
2654 break;
2655
2656 case DW_OP_implicit_pointer:
2657 case DW_OP_entry_value:
2658 case DW_OP_const_type:
2659 case DW_OP_regval_type:
2660 case DW_OP_deref_type:
2661 case DW_OP_convert:
2662 case DW_OP_reinterpret:
2663 case DW_OP_GNU_implicit_pointer:
2664 case DW_OP_GNU_entry_value:
2665 case DW_OP_GNU_const_type:
2666 case DW_OP_GNU_regval_type:
2667 case DW_OP_GNU_deref_type:
2668 case DW_OP_GNU_convert:
2669 case DW_OP_GNU_reinterpret:
2670 case DW_OP_GNU_parameter_ref:
2671 gcc_unreachable ();
2672 break;
2673
2674 default:
2675 /* Other codes have no operands. */
2676 break;
2677 }
2678 }
2679
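/* Output a sequence of location operations in raw byte form, suitable for
   use as the operand of a .cfi_escape directive.  */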
2680 void
2681 output_loc_sequence_raw (dw_loc_descr_ref loc)
2682 {
2683 while (1)
2684 {
2685 enum dwarf_location_atom opc = loc->dw_loc_opc;
2686 /* Output the opcode. */
2687 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2688 {
2689 unsigned r = (opc - DW_OP_breg0);
2690 r = DWARF2_FRAME_REG_OUT (r, 1);
2691 gcc_assert (r <= 31);
2692 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2693 }
2694 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2695 {
2696 unsigned r = (opc - DW_OP_reg0);
2697 r = DWARF2_FRAME_REG_OUT (r, 1);
2698 gcc_assert (r <= 31);
2699 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2700 }
2701 /* Output the opcode. */
2702 fprintf (asm_out_file, "%#x", opc);
2703 output_loc_operands_raw (loc);
2704
2705 if (!loc->dw_loc_next)
2706 break;
2707 loc = loc->dw_loc_next;
2708
2709 fputc (',', asm_out_file);
2710 }
2711 }
2712
2713 /* This function builds a dwarf location descriptor sequence from a
2714 dw_cfa_location, adding the given OFFSET to the result of the
2715 expression. */
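/* For instance, with a non-indirect CFA of (reg 6) + 16 and an OFFSET of 8,
   the result is a single register-relative descriptor meaning "reg6 + 24".
   In the indirect case the register-relative address is dereferenced first
   and the combined offset is then applied to the loaded value.  (This is an
   illustrative reading of the code below; new_reg_loc_descr and
   loc_descr_plus_const are defined elsewhere in this file.)  */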
2716
2717 struct dw_loc_descr_node *
2718 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2719 {
2720 struct dw_loc_descr_node *head, *tmp;
2721
2722 offset += cfa->offset;
2723
2724 if (cfa->indirect)
2725 {
2726 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2727 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2728 head->dw_loc_oprnd1.val_entry = NULL;
2729 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2730 add_loc_descr (&head, tmp);
2731 loc_descr_plus_const (&head, offset);
2732 }
2733 else
2734 head = new_reg_loc_descr (cfa->reg, offset);
2735
2736 return head;
2737 }
2738
2739 /* This function builds a dwarf location descriptor sequence for
2740 the address at OFFSET from the CFA when the stack is aligned
2741 to ALIGNMENT bytes. */
2742
2743 struct dw_loc_descr_node *
2744 build_cfa_aligned_loc (dw_cfa_location *cfa,
2745 poly_int64 offset, HOST_WIDE_INT alignment)
2746 {
2747 struct dw_loc_descr_node *head;
2748 unsigned int dwarf_fp
2749 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2750
2751 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2752 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2753 {
2754 head = new_reg_loc_descr (dwarf_fp, 0);
2755 add_loc_descr (&head, int_loc_descriptor (alignment));
2756 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2757 loc_descr_plus_const (&head, offset);
2758 }
2759 else
2760 head = new_reg_loc_descr (dwarf_fp, offset);
2761 return head;
2762 }
2763 \f
2764 /* And now, the support for symbolic debugging information. */
2765
2766 /* .debug_str support. */
2767
2768 static void dwarf2out_init (const char *);
2769 static void dwarf2out_finish (const char *);
2770 static void dwarf2out_early_finish (const char *);
2771 static void dwarf2out_assembly_start (void);
2772 static void dwarf2out_define (unsigned int, const char *);
2773 static void dwarf2out_undef (unsigned int, const char *);
2774 static void dwarf2out_start_source_file (unsigned, const char *);
2775 static void dwarf2out_end_source_file (unsigned);
2776 static void dwarf2out_function_decl (tree);
2777 static void dwarf2out_begin_block (unsigned, unsigned);
2778 static void dwarf2out_end_block (unsigned, unsigned);
2779 static bool dwarf2out_ignore_block (const_tree);
2780 static void dwarf2out_early_global_decl (tree);
2781 static void dwarf2out_late_global_decl (tree);
2782 static void dwarf2out_type_decl (tree, int);
2783 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2784 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2785 dw_die_ref);
2786 static void dwarf2out_abstract_function (tree);
2787 static void dwarf2out_var_location (rtx_insn *);
2788 static void dwarf2out_inline_entry (tree);
2789 static void dwarf2out_size_function (tree);
2790 static void dwarf2out_begin_function (tree);
2791 static void dwarf2out_end_function (unsigned int);
2792 static void dwarf2out_register_main_translation_unit (tree unit);
2793 static void dwarf2out_set_name (tree, tree);
2794 static void dwarf2out_register_external_die (tree decl, const char *sym,
2795 unsigned HOST_WIDE_INT off);
2796 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2797 unsigned HOST_WIDE_INT *off);
2798
2799 /* The debug hooks structure. */
2800
2801 const struct gcc_debug_hooks dwarf2_debug_hooks =
2802 {
2803 dwarf2out_init,
2804 dwarf2out_finish,
2805 dwarf2out_early_finish,
2806 dwarf2out_assembly_start,
2807 dwarf2out_define,
2808 dwarf2out_undef,
2809 dwarf2out_start_source_file,
2810 dwarf2out_end_source_file,
2811 dwarf2out_begin_block,
2812 dwarf2out_end_block,
2813 dwarf2out_ignore_block,
2814 dwarf2out_source_line,
2815 dwarf2out_begin_prologue,
2816 #if VMS_DEBUGGING_INFO
2817 dwarf2out_vms_end_prologue,
2818 dwarf2out_vms_begin_epilogue,
2819 #else
2820 debug_nothing_int_charstar,
2821 debug_nothing_int_charstar,
2822 #endif
2823 dwarf2out_end_epilogue,
2824 dwarf2out_begin_function,
2825 dwarf2out_end_function, /* end_function */
2826 dwarf2out_register_main_translation_unit,
2827 dwarf2out_function_decl, /* function_decl */
2828 dwarf2out_early_global_decl,
2829 dwarf2out_late_global_decl,
2830 dwarf2out_type_decl, /* type_decl */
2831 dwarf2out_imported_module_or_decl,
2832 dwarf2out_die_ref_for_decl,
2833 dwarf2out_register_external_die,
2834 debug_nothing_tree, /* deferred_inline_function */
2835 /* The DWARF 2 backend tries to reduce debugging bloat by not
2836 emitting the abstract description of inline functions until
2837 something tries to reference them. */
2838 dwarf2out_abstract_function, /* outlining_inline_function */
2839 debug_nothing_rtx_code_label, /* label */
2840 debug_nothing_int, /* handle_pch */
2841 dwarf2out_var_location,
2842 dwarf2out_inline_entry, /* inline_entry */
2843 dwarf2out_size_function, /* size_function */
2844 dwarf2out_switch_text_section,
2845 dwarf2out_set_name,
2846 1, /* start_end_main_source_file */
2847 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2848 };
2849
2850 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2851 {
2852 dwarf2out_init,
2853 debug_nothing_charstar,
2854 debug_nothing_charstar,
2855 dwarf2out_assembly_start,
2856 debug_nothing_int_charstar,
2857 debug_nothing_int_charstar,
2858 debug_nothing_int_charstar,
2859 debug_nothing_int,
2860 debug_nothing_int_int, /* begin_block */
2861 debug_nothing_int_int, /* end_block */
2862 debug_true_const_tree, /* ignore_block */
2863 dwarf2out_source_line, /* source_line */
2864 debug_nothing_int_int_charstar, /* begin_prologue */
2865 debug_nothing_int_charstar, /* end_prologue */
2866 debug_nothing_int_charstar, /* begin_epilogue */
2867 debug_nothing_int_charstar, /* end_epilogue */
2868 debug_nothing_tree, /* begin_function */
2869 debug_nothing_int, /* end_function */
2870 debug_nothing_tree, /* register_main_translation_unit */
2871 debug_nothing_tree, /* function_decl */
2872 debug_nothing_tree, /* early_global_decl */
2873 debug_nothing_tree, /* late_global_decl */
2874 debug_nothing_tree_int, /* type_decl */
2875 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2876 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2877 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2878 debug_nothing_tree, /* deferred_inline_function */
2879 debug_nothing_tree, /* outlining_inline_function */
2880 debug_nothing_rtx_code_label, /* label */
2881 debug_nothing_int, /* handle_pch */
2882 debug_nothing_rtx_insn, /* var_location */
2883 debug_nothing_tree, /* inline_entry */
2884 debug_nothing_tree, /* size_function */
2885 debug_nothing_void, /* switch_text_section */
2886 debug_nothing_tree_tree, /* set_name */
2887 0, /* start_end_main_source_file */
2888 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2889 };
2890 \f
2891 /* NOTE: In the comments in this file, many references are made to
2892 "Debugging Information Entries". This term is abbreviated as `DIE'
2893 throughout the remainder of this file. */
2894
2895 /* An internal representation of the DWARF output is built, and then
2896 walked to generate the DWARF debugging info. The walk of the internal
2897 representation is done after the entire program has been compiled.
2898 The types below are used to describe the internal representation. */
2899
2900 /* Whether to put type DIEs into their own section .debug_types instead
2901 of making them part of the .debug_info section. Only supported
2902 for DWARF 4 or higher, and only if the user didn't disable them
2903 through -fno-debug-types-section. It is more efficient to put
2904 them in separate comdat sections, since the linker will then be
2905 able to remove duplicates. But not all tools support
2906 .debug_types sections yet. For DWARF 5 or higher .debug_types
2907 no longer exists; type units use DW_UT_type in .debug_info. */
2908
2909 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2910
2911 /* Various DIE's use offsets relative to the beginning of the
2912 .debug_info section to refer to each other. */
2913
2914 typedef long int dw_offset;
2915
2916 struct comdat_type_node;
2917
2918 /* The entries in the line_info table more-or-less mirror the opcodes
2919 that are used in the real dwarf line table. Arrays of these entries
2920 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2921 supported. */
2922
2923 enum dw_line_info_opcode {
2924 /* Emit DW_LNE_set_address; the operand is the label index. */
2925 LI_set_address,
2926
2927 /* Emit a row to the matrix with the given line. This may be done
2928 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2929 special opcodes. */
2930 LI_set_line,
2931
2932 /* Emit a DW_LNS_set_file. */
2933 LI_set_file,
2934
2935 /* Emit a DW_LNS_set_column. */
2936 LI_set_column,
2937
2938 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2939 LI_negate_stmt,
2940
2941 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2942 LI_set_prologue_end,
2943 LI_set_epilogue_begin,
2944
2945 /* Emit a DW_LNE_set_discriminator. */
2946 LI_set_discriminator,
2947
2948 /* Output a Fixed Advance PC; the target PC is the label index; the
2949 base PC is the previous LI_adv_address or LI_set_address entry.
2950 We only use this when emitting debug views without assembler
2951 support, at explicit user request. Ideally, we should only use
2952 it when the offset might be zero but we can't tell: it's the only
2953 way to maybe change the PC without resetting the view number. */
2954 LI_adv_address
2955 };
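
/* For illustration, a table typically interleaves LI_set_address entries
   (one per code label emitted for line info) with LI_set_file, LI_set_column
   and LI_set_line entries describing the source position that applies from
   that address onwards; the actual sequences are produced by
   dwarf2out_source_line and related code elsewhere in this file.  */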
2956
2957 typedef struct GTY(()) dw_line_info_struct {
2958 enum dw_line_info_opcode opcode;
2959 unsigned int val;
2960 } dw_line_info_entry;
2961
2962
2963 struct GTY(()) dw_line_info_table {
2964 /* The label that marks the end of this section. */
2965 const char *end_label;
2966
2967 /* The values for the last row of the matrix, as collected in the table.
2968 These are used to minimize the changes to the next row. */
2969 unsigned int file_num;
2970 unsigned int line_num;
2971 unsigned int column_num;
2972 int discrim_num;
2973 bool is_stmt;
2974 bool in_use;
2975
2976 /* This denotes the NEXT view number.
2977
2978 If it is 0, it is known that the NEXT view will be the first view
2979 at the given PC.
2980
2981 If it is -1, we're forcing the view number to be reset, e.g. at a
2982 function entry.
2983
2984 The meaning of other nonzero values depends on whether we're
2985 computing views internally or leaving it for the assembler to do
2986 so. If we're emitting them internally, view denotes the view
2987 number since the last known advance of PC. If we're leaving it
2988 for the assembler, it denotes the LVU label number that we're
2989 going to ask the assembler to assign. */
2990 var_loc_view view;
2991
2992 /* This counts the number of symbolic views emitted in this table
2993 since the latest view reset. Its max value, over all tables,
2994 sets symview_upper_bound. */
2995 var_loc_view symviews_since_reset;
2996
2997 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
2998 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
2999 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3000 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3001
3002 vec<dw_line_info_entry, va_gc> *entries;
3003 };
3004
3005 /* This is an upper bound for view numbers that the assembler may
3006 assign to symbolic views output in this translation. It is used to
3007 decide how big a field to use to represent view numbers in
3008 symview-classed attributes. */
3009
3010 static var_loc_view symview_upper_bound;
3011
3012 /* If we're keeping track of location views and their reset points, and
3013 INSN is a reset point (i.e., it necessarily advances the PC), mark
3014 the next view in TABLE as reset. */
3015
3016 static void
3017 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3018 {
3019 if (!debug_internal_reset_location_views)
3020 return;
3021
3022 /* Maybe turn (part of?) this test into a default target hook. */
3023 int reset = 0;
3024
3025 if (targetm.reset_location_view)
3026 reset = targetm.reset_location_view (insn);
3027
3028 if (reset)
3029 ;
3030 else if (JUMP_TABLE_DATA_P (insn))
3031 reset = 1;
3032 else if (GET_CODE (insn) == USE
3033 || GET_CODE (insn) == CLOBBER
3034 || GET_CODE (insn) == ASM_INPUT
3035 || asm_noperands (insn) >= 0)
3036 ;
3037 else if (get_attr_min_length (insn) > 0)
3038 reset = 1;
3039
3040 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3041 RESET_NEXT_VIEW (table->view);
3042 }
3043
3044 /* Each DIE attribute has a field specifying the attribute kind,
3045 a link to the next attribute in the chain, and an attribute value.
3046 Attributes are typically linked below the DIE they modify. */
3047
3048 typedef struct GTY(()) dw_attr_struct {
3049 enum dwarf_attribute dw_attr;
3050 dw_val_node dw_attr_val;
3051 }
3052 dw_attr_node;
3053
3054
3055 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3056 The children of each node form a circular list linked by
3057 die_sib. die_child points to the node *before* the "first" child node. */
3058
3059 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3060 union die_symbol_or_type_node
3061 {
3062 const char * GTY ((tag ("0"))) die_symbol;
3063 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3064 }
3065 GTY ((desc ("%0.comdat_type_p"))) die_id;
3066 vec<dw_attr_node, va_gc> *die_attr;
3067 dw_die_ref die_parent;
3068 dw_die_ref die_child;
3069 dw_die_ref die_sib;
3070 dw_die_ref die_definition; /* ref from a specification to its definition */
3071 dw_offset die_offset;
3072 unsigned long die_abbrev;
3073 int die_mark;
3074 unsigned int decl_id;
3075 enum dwarf_tag die_tag;
3076 /* Die is used and must not be pruned as unused. */
3077 BOOL_BITFIELD die_perennial_p : 1;
3078 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3079 /* For an external ref to die_symbol if die_offset contains an extra
3080 offset to that symbol. */
3081 BOOL_BITFIELD with_offset : 1;
3082 /* Whether this DIE was removed from the DIE tree, for example via
3083 prune_unused_types. Such DIEs are not treated as present by the
3084 DIE lookup routines. */
3085 BOOL_BITFIELD removed : 1;
3086 /* Lots of spare bits. */
3087 }
3088 die_node;
3089
3090 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3091 static bool early_dwarf;
3092 static bool early_dwarf_finished;
3093 struct set_early_dwarf {
3094 bool saved;
3095 set_early_dwarf () : saved(early_dwarf)
3096 {
3097 gcc_assert (! early_dwarf_finished);
3098 early_dwarf = true;
3099 }
3100 ~set_early_dwarf () { early_dwarf = saved; }
3101 };
3102
3103 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3104 #define FOR_EACH_CHILD(die, c, expr) do { \
3105 c = die->die_child; \
3106 if (c) do { \
3107 c = c->die_sib; \
3108 expr; \
3109 } while (c != die->die_child); \
3110 } while (0)
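
/* For example, FOR_EACH_CHILD (die, c, total += size_of_die (c)) visits
   every child of DIE exactly once, in sibling order, binding C to each child
   in turn (the size_of_die call is only an illustration).  */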
3111
3112 /* The pubname structure */
3113
3114 typedef struct GTY(()) pubname_struct {
3115 dw_die_ref die;
3116 const char *name;
3117 }
3118 pubname_entry;
3119
3120
3121 struct GTY(()) dw_ranges {
3122 const char *label;
3123 /* If this is positive, it's a block number, otherwise it's a
3124 bitwise-negated index into dw_ranges_by_label. */
3125 int num;
3126 /* Index for the range list for DW_FORM_rnglistx. */
3127 unsigned int idx : 31;
3128 /* True if this range might be in a different section
3129 from the previous entry. */
3130 unsigned int maybe_new_sec : 1;
3131 };
3132
3133 /* A structure to hold a macinfo entry. */
3134
3135 typedef struct GTY(()) macinfo_struct {
3136 unsigned char code;
3137 unsigned HOST_WIDE_INT lineno;
3138 const char *info;
3139 }
3140 macinfo_entry;
3141
3142
3143 struct GTY(()) dw_ranges_by_label {
3144 const char *begin;
3145 const char *end;
3146 };
3147
3148 /* The comdat type node structure. */
3149 struct GTY(()) comdat_type_node
3150 {
3151 dw_die_ref root_die;
3152 dw_die_ref type_die;
3153 dw_die_ref skeleton_die;
3154 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3155 comdat_type_node *next;
3156 };
3157
3158 /* A list of DIEs for which we can't determine ancestry (parent_die
3159 field) just yet. Later in dwarf2out_finish we will fill in the
3160 missing bits. */
3161 typedef struct GTY(()) limbo_die_struct {
3162 dw_die_ref die;
3163 /* The tree for which this DIE was created. We use this to
3164 determine ancestry later. */
3165 tree created_for;
3166 struct limbo_die_struct *next;
3167 }
3168 limbo_die_node;
3169
3170 typedef struct skeleton_chain_struct
3171 {
3172 dw_die_ref old_die;
3173 dw_die_ref new_die;
3174 struct skeleton_chain_struct *parent;
3175 }
3176 skeleton_chain_node;
3177
3178 /* Define a macro which returns nonzero for a TYPE_DECL which was
3179 implicitly generated for a type.
3180
3181 Note that, unlike the C front-end (which generates a NULL named
3182 TYPE_DECL node for each complete tagged type, each array type,
3183 and each function type node created) the C++ front-end generates
3184 a _named_ TYPE_DECL node for each tagged type node created.
3185 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3186 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3187 front-end, but for each type, tagged or not. */
3188
3189 #define TYPE_DECL_IS_STUB(decl) \
3190 (DECL_NAME (decl) == NULL_TREE \
3191 || (DECL_ARTIFICIAL (decl) \
3192 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3193 /* This is necessary for stub decls that \
3194 appear in nested inline functions. */ \
3195 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3196 && (decl_ultimate_origin (decl) \
3197 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3198
3199 /* Information concerning the compilation unit's programming
3200 language, and compiler version. */
3201
3202 /* Fixed size portion of the DWARF compilation unit header. */
3203 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3204 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3205 + (dwarf_version >= 5 ? 4 : 3))
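
/* For 32-bit DWARF (4-byte initial length and section offsets) this works
   out to 12 bytes for DWARF 5 (unit length, version, unit type, address
   size, abbrev offset) and 11 bytes for DWARF 2-4 (unit length, version,
   abbrev offset, address size).  */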
3206
3207 /* Fixed size portion of the DWARF comdat type unit header. */
3208 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3209 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3210 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3211
3212 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3213 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3214 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3215
3216 /* Fixed size portion of public names info. */
3217 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3218
3219 /* Fixed size portion of the address range info. */
3220 #define DWARF_ARANGES_HEADER_SIZE \
3221 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3222 DWARF2_ADDR_SIZE * 2) \
3223 - DWARF_INITIAL_LENGTH_SIZE)
3224
3225 /* Size of padding portion in the address range info. It must be
3226 aligned to twice the pointer size. */
3227 #define DWARF_ARANGES_PAD_SIZE \
3228 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3229 DWARF2_ADDR_SIZE * 2) \
3230 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
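
/* As a concrete example, with 4-byte DWARF offsets and 8-byte addresses the
   12 header bytes (initial length, version, .debug_info offset, address size
   and segment size) are rounded up to 16, so 4 bytes of padding precede the
   first address/length pair.  */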
3231
3232 /* Use assembler line directives if available. */
3233 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3234 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3235 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3236 #else
3237 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3238 #endif
3239 #endif
3240
3241 /* Use assembler views in line directives if available. */
3242 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3243 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3244 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3245 #else
3246 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3247 #endif
3248 #endif
3249
3250 /* Return true if GCC configure detected assembler support for .loc. */
3251
3252 bool
3253 dwarf2out_default_as_loc_support (void)
3254 {
3255 return DWARF2_ASM_LINE_DEBUG_INFO;
3256 #if (GCC_VERSION >= 3000)
3257 # undef DWARF2_ASM_LINE_DEBUG_INFO
3258 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3259 #endif
3260 }
3261
3262 /* Return true if GCC configure detected assembler support for views
3263 in .loc directives. */
3264
3265 bool
3266 dwarf2out_default_as_locview_support (void)
3267 {
3268 return DWARF2_ASM_VIEW_DEBUG_INFO;
3269 #if (GCC_VERSION >= 3000)
3270 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3271 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3272 #endif
3273 }
3274
3275 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3276 view computation, and it refers to a view identifier for which we
3277 will not emit a label because it is known to map to a view number
3278 zero. We won't allocate the bitmap if we're not using assembler
3279 support for location views, but we have to make the variable
3280 visible for GGC and for code that will be optimized out for lack of
3281 support but that's still parsed and compiled. We could abstract it
3282 out with macros, but it's not worth it. */
3283 static GTY(()) bitmap zero_view_p;
3284
3285 /* Evaluate to TRUE iff N is known to identify the first location view
3286 at its PC. When not using assembler location view computation,
3287 that must be view number zero. Otherwise, the zero_view_p bitmap
3288 is allocated, and the view label numbers recorded in it are the
3289 ones known to be zero. */
3290 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3291 || (N) == (var_loc_view)-1 \
3292 || (zero_view_p \
3293 && bitmap_bit_p (zero_view_p, (N))))
3294
3295 /* Return true iff we're to emit .loc directives for the assembler to
3296 generate line number sections.
3297
3298 When we're not emitting views, all we need from the assembler is
3299 support for .loc directives.
3300
3301 If we are emitting views, we can only use the assembler's .loc
3302 support if it also supports views.
3303
3304 When the compiler is emitting the line number programs and
3305 computing view numbers itself, it resets view numbers at known PC
3306 changes and counts from that, and then it emits view numbers as
3307 literal constants in locviewlists. There are cases in which the
3308 compiler is not sure about PC changes, e.g. when extra alignment is
3309 requested for a label. In these cases, the compiler may not reset
3310 the view counter, and the potential PC advance in the line number
3311 program will use an opcode that does not reset the view counter
3312 even if the PC actually changes, so that compiler and debug info
3313 consumer can keep view numbers in sync.
3314
3315 When the compiler defers view computation to the assembler, it
3316 emits symbolic view numbers in locviewlists, with the exception of
3317 views known to be zero (forced resets, or reset after
3318 compiler-visible PC changes): instead of emitting symbols for
3319 these, we emit literal zero and assert the assembler agrees with
3320 the compiler's assessment. We could use symbolic views everywhere,
3321 instead of special-casing zero views, but then we'd be unable to
3322 optimize out locviewlists that contain only zeros. */
3323
3324 static bool
3325 output_asm_line_debug_info (void)
3326 {
3327 return (dwarf2out_as_loc_support
3328 && (dwarf2out_as_locview_support
3329 || !debug_variable_location_views));
3330 }
3331
3332 /* Minimum line offset in a special line info. opcode.
3333 This value was chosen to give a reasonable range of values. */
3334 #define DWARF_LINE_BASE -10
3335
3336 /* First special line opcode - leave room for the standard opcodes. */
3337 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3338
3339 /* Range of line offsets in a special line info. opcode. */
3340 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
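
/* These values feed the standard DWARF special-opcode formula: a special
   opcode encodes a line delta and an operation (address) advance together as
   DWARF_LINE_OPCODE_BASE + (line_delta - DWARF_LINE_BASE)
   + DWARF_LINE_RANGE * advance, and can only be used when the result fits
   in a single byte (at most 255).  */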
3341
3342 /* Flag that indicates the initial value of the is_stmt_start flag.
3343 In the present implementation, we do not mark any lines as
3344 the beginning of a source statement, because that information
3345 is not made available by the GCC front-end. */
3346 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3347
3348 /* Maximum number of operations per instruction bundle. */
3349 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3350 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3351 #endif
3352
3353 /* This location is used by calc_die_sizes() to keep track of
3354 the offset of each DIE within the .debug_info section. */
3355 static unsigned long next_die_offset;
3356
3357 /* Record the root of the DIE's built for the current compilation unit. */
3358 static GTY(()) dw_die_ref single_comp_unit_die;
3359
3360 /* A list of type DIEs that have been separated into comdat sections. */
3361 static GTY(()) comdat_type_node *comdat_type_list;
3362
3363 /* A list of CU DIEs that have been separated. */
3364 static GTY(()) limbo_die_node *cu_die_list;
3365
3366 /* A list of DIEs with a NULL parent waiting to be relocated. */
3367 static GTY(()) limbo_die_node *limbo_die_list;
3368
3369 /* A list of DIEs for which we may have to generate
3370 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3371 static GTY(()) limbo_die_node *deferred_asm_name;
3372
3373 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3374 {
3375 typedef const char *compare_type;
3376
3377 static hashval_t hash (dwarf_file_data *);
3378 static bool equal (dwarf_file_data *, const char *);
3379 };
3380
3381 /* Filenames referenced by this compilation unit. */
3382 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3383
3384 struct decl_die_hasher : ggc_ptr_hash<die_node>
3385 {
3386 typedef tree compare_type;
3387
3388 static hashval_t hash (die_node *);
3389 static bool equal (die_node *, tree);
3390 };
3391 /* A hash table of references to DIE's that describe declarations.
3392 The key is a DECL_UID() which is a unique number identifying each decl. */
3393 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3394
3395 struct GTY ((for_user)) variable_value_struct {
3396 unsigned int decl_id;
3397 vec<dw_die_ref, va_gc> *dies;
3398 };
3399
3400 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3401 {
3402 typedef tree compare_type;
3403
3404 static hashval_t hash (variable_value_struct *);
3405 static bool equal (variable_value_struct *, tree);
3406 };
3407 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3408 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL that is
3409 the DECL_CONTEXT of the referenced VAR_DECLs. */
3410 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3411
3412 struct block_die_hasher : ggc_ptr_hash<die_struct>
3413 {
3414 static hashval_t hash (die_struct *);
3415 static bool equal (die_struct *, die_struct *);
3416 };
3417
3418 /* A hash table of references to DIE's that describe COMMON blocks.
3419 The key is DECL_UID() ^ die_parent. */
3420 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3421
3422 typedef struct GTY(()) die_arg_entry_struct {
3423 dw_die_ref die;
3424 tree arg;
3425 } die_arg_entry;
3426
3427
3428 /* Node of the variable location list. */
3429 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3430 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3431 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3432 in mode of the EXPR_LIST node and first EXPR_LIST operand
3433 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3434 location or NULL for padding. For larger bitsizes,
3435 mode is 0 and first operand is a CONCAT with bitsize
3436 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3437 NULL as second operand. */
3438 rtx GTY (()) loc;
3439 const char * GTY (()) label;
3440 struct var_loc_node * GTY (()) next;
3441 var_loc_view view;
3442 };
3443
3444 /* Variable location list. */
3445 struct GTY ((for_user)) var_loc_list_def {
3446 struct var_loc_node * GTY (()) first;
3447
3448 /* Pointer to the last or last-but-one element of the
3449 chained list. If the list is empty, both first and
3450 last are NULL. If the list contains just one node,
3451 or the last node is certainly not redundant, this points
3452 to the last node; otherwise it points to the last but one.
3453 Do not mark it for GC because it is marked through the chain. */
3454 struct var_loc_node * GTY ((skip ("%h"))) last;
3455
3456 /* Pointer to the last element before a section switch;
3457 if NULL, either sections weren't switched or first
3458 is after the section switch. */
3459 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3460
3461 /* DECL_UID of the variable decl. */
3462 unsigned int decl_id;
3463 };
3464 typedef struct var_loc_list_def var_loc_list;
3465
3466 /* Call argument location list. */
3467 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3468 rtx GTY (()) call_arg_loc_note;
3469 const char * GTY (()) label;
3470 tree GTY (()) block;
3471 bool tail_call_p;
3472 rtx GTY (()) symbol_ref;
3473 struct call_arg_loc_node * GTY (()) next;
3474 };
3475
3476
3477 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3478 {
3479 typedef const_tree compare_type;
3480
3481 static hashval_t hash (var_loc_list *);
3482 static bool equal (var_loc_list *, const_tree);
3483 };
3484
3485 /* Table of decl location linked lists. */
3486 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3487
3488 /* Head and tail of call_arg_loc chain. */
3489 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3490 static struct call_arg_loc_node *call_arg_loc_last;
3491
3492 /* Number of call sites in the current function. */
3493 static int call_site_count = -1;
3494 /* Number of tail call sites in the current function. */
3495 static int tail_call_site_count = -1;
3496
3497 /* A cached location list. */
3498 struct GTY ((for_user)) cached_dw_loc_list_def {
3499 /* The DECL_UID of the decl that this entry describes. */
3500 unsigned int decl_id;
3501
3502 /* The cached location list. */
3503 dw_loc_list_ref loc_list;
3504 };
3505 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3506
3507 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3508 {
3509
3510 typedef const_tree compare_type;
3511
3512 static hashval_t hash (cached_dw_loc_list *);
3513 static bool equal (cached_dw_loc_list *, const_tree);
3514 };
3515
3516 /* Table of cached location lists. */
3517 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3518
3519 /* A vector of references to DIE's that are uniquely identified by their tag,
3520 presence/absence of children DIE's, and list of attribute/value pairs. */
3521 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3522
3523 /* A hash map to remember the stack usage for DWARF procedures. The value
3524 stored is the stack size difference between before the DWARF procedure
3525 invocation and after it returned. In other words, for a DWARF procedure
3526 that consumes N stack slots and that pushes M ones, this stores M - N. */
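/* For instance, a DWARF procedure that pops two stack entries and pushes a
   single result is recorded here as -1.  */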
3527 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3528
3529 /* A global counter for generating labels for line number data. */
3530 static unsigned int line_info_label_num;
3531
3532 /* The current table to which we should emit line number information
3533 for the current function. This will be set up at the beginning of
3534 assembly for the function. */
3535 static GTY(()) dw_line_info_table *cur_line_info_table;
3536
3537 /* The two default tables of line number info. */
3538 static GTY(()) dw_line_info_table *text_section_line_info;
3539 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3540
3541 /* The set of all non-default tables of line number info. */
3542 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3543
3544 /* A flag to tell pubnames/types export if there is an info section to
3545 refer to. */
3546 static bool info_section_emitted;
3547
3548 /* A pointer to the base of a table that contains a list of publicly
3549 accessible names. */
3550 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3551
3552 /* A pointer to the base of a table that contains a list of publicly
3553 accessible types. */
3554 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3555
3556 /* A pointer to the base of a table that contains a list of macro
3557 defines/undefines (and file start/end markers). */
3558 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3559
3560 /* True if .debug_macinfo or .debug_macros section is going to be
3561 emitted. */
3562 #define have_macinfo \
3563 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3564 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3565 && !macinfo_table->is_empty ())
3566
3567 /* Vector of dies for which we should generate .debug_ranges info. */
3568 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3569
3570 /* Vector of pairs of labels referenced in ranges_table. */
3571 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3572
3573 /* Whether we have location lists that need outputting. */
3574 static GTY(()) bool have_location_lists;
3575
3576 /* Unique label counter. */
3577 static GTY(()) unsigned int loclabel_num;
3578
3579 /* Unique label counter for point-of-call tables. */
3580 static GTY(()) unsigned int poc_label_num;
3581
3582 /* The last file entry emitted by maybe_emit_file(). */
3583 static GTY(()) struct dwarf_file_data * last_emitted_file;
3584
3585 /* Number of internal labels generated by gen_internal_sym(). */
3586 static GTY(()) int label_num;
3587
3588 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3589
3590 /* Instances of generic types for which we need to generate debug
3591 info that describes their generic parameters and arguments. That
3592 generation needs to happen once all types are properly laid out so
3593 we do it at the end of compilation. */
3594 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3595
3596 /* Offset from the "steady-state frame pointer" to the frame base,
3597 within the current function. */
3598 static poly_int64 frame_pointer_fb_offset;
3599 static bool frame_pointer_fb_offset_valid;
3600
3601 static vec<dw_die_ref> base_types;
3602
3603 /* Flags to represent a set of attribute classes for attributes that represent
3604 a scalar value (bounds, pointers, ...). */
3605 enum dw_scalar_form
3606 {
3607 dw_scalar_form_constant = 0x01,
3608 dw_scalar_form_exprloc = 0x02,
3609 dw_scalar_form_reference = 0x04
3610 };
3611
3612 /* Forward declarations for functions defined in this file. */
3613
3614 static int is_pseudo_reg (const_rtx);
3615 static tree type_main_variant (tree);
3616 static int is_tagged_type (const_tree);
3617 static const char *dwarf_tag_name (unsigned);
3618 static const char *dwarf_attr_name (unsigned);
3619 static const char *dwarf_form_name (unsigned);
3620 static tree decl_ultimate_origin (const_tree);
3621 static tree decl_class_context (tree);
3622 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3623 static inline enum dw_val_class AT_class (dw_attr_node *);
3624 static inline unsigned int AT_index (dw_attr_node *);
3625 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3626 static inline unsigned AT_flag (dw_attr_node *);
3627 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3628 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3629 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3630 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3631 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3632 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3633 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3634 unsigned int, unsigned char *);
3635 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3636 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3637 static inline const char *AT_string (dw_attr_node *);
3638 static enum dwarf_form AT_string_form (dw_attr_node *);
3639 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3640 static void add_AT_specification (dw_die_ref, dw_die_ref);
3641 static inline dw_die_ref AT_ref (dw_attr_node *);
3642 static inline int AT_ref_external (dw_attr_node *);
3643 static inline void set_AT_ref_external (dw_attr_node *, int);
3644 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3645 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3646 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3647 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3648 dw_loc_list_ref);
3649 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3650 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3651 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3652 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3653 static void remove_addr_table_entry (addr_table_entry *);
3654 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3655 static inline rtx AT_addr (dw_attr_node *);
3656 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3657 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3658 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3659 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3660 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3661 const char *);
3662 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3663 unsigned HOST_WIDE_INT);
3664 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3665 unsigned long, bool);
3666 static inline const char *AT_lbl (dw_attr_node *);
3667 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3668 static const char *get_AT_low_pc (dw_die_ref);
3669 static const char *get_AT_hi_pc (dw_die_ref);
3670 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3671 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3672 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3673 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3674 static bool is_cxx (void);
3675 static bool is_cxx (const_tree);
3676 static bool is_fortran (void);
3677 static bool is_ada (void);
3678 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3679 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3680 static void add_child_die (dw_die_ref, dw_die_ref);
3681 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3682 static dw_die_ref lookup_type_die (tree);
3683 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3684 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3685 static void equate_type_number_to_die (tree, dw_die_ref);
3686 static dw_die_ref lookup_decl_die (tree);
3687 static var_loc_list *lookup_decl_loc (const_tree);
3688 static void equate_decl_number_to_die (tree, dw_die_ref);
3689 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3690 static void print_spaces (FILE *);
3691 static void print_die (dw_die_ref, FILE *);
3692 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3693 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3694 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3695 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3696 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3697 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3698 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3699 struct md5_ctx *, int *);
3700 struct checksum_attributes;
3701 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3702 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3703 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3704 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3705 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3706 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3707 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3708 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3709 static int is_type_die (dw_die_ref);
3710 static int is_comdat_die (dw_die_ref);
3711 static inline bool is_template_instantiation (dw_die_ref);
3712 static int is_declaration_die (dw_die_ref);
3713 static int should_move_die_to_comdat (dw_die_ref);
3714 static dw_die_ref clone_as_declaration (dw_die_ref);
3715 static dw_die_ref clone_die (dw_die_ref);
3716 static dw_die_ref clone_tree (dw_die_ref);
3717 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3718 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3719 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3720 static dw_die_ref generate_skeleton (dw_die_ref);
3721 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3722 dw_die_ref,
3723 dw_die_ref);
3724 static void break_out_comdat_types (dw_die_ref);
3725 static void copy_decls_for_unworthy_types (dw_die_ref);
3726
3727 static void add_sibling_attributes (dw_die_ref);
3728 static void output_location_lists (dw_die_ref);
3729 static int constant_size (unsigned HOST_WIDE_INT);
3730 static unsigned long size_of_die (dw_die_ref);
3731 static void calc_die_sizes (dw_die_ref);
3732 static void calc_base_type_die_sizes (void);
3733 static void mark_dies (dw_die_ref);
3734 static void unmark_dies (dw_die_ref);
3735 static void unmark_all_dies (dw_die_ref);
3736 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3737 static unsigned long size_of_aranges (void);
3738 static enum dwarf_form value_format (dw_attr_node *);
3739 static void output_value_format (dw_attr_node *);
3740 static void output_abbrev_section (void);
3741 static void output_die_abbrevs (unsigned long, dw_die_ref);
3742 static void output_die (dw_die_ref);
3743 static void output_compilation_unit_header (enum dwarf_unit_type);
3744 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3745 static void output_comdat_type_unit (comdat_type_node *);
3746 static const char *dwarf2_name (tree, int);
3747 static void add_pubname (tree, dw_die_ref);
3748 static void add_enumerator_pubname (const char *, dw_die_ref);
3749 static void add_pubname_string (const char *, dw_die_ref);
3750 static void add_pubtype (tree, dw_die_ref);
3751 static void output_pubnames (vec<pubname_entry, va_gc> *);
3752 static void output_aranges (void);
3753 static unsigned int add_ranges (const_tree, bool = false);
3754 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3755 bool *, bool);
3756 static void output_ranges (void);
3757 static dw_line_info_table *new_line_info_table (void);
3758 static void output_line_info (bool);
3759 static void output_file_names (void);
3760 static dw_die_ref base_type_die (tree, bool);
3761 static int is_base_type (tree);
3762 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3763 static int decl_quals (const_tree);
3764 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3765 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3766 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3767 static int type_is_enum (const_tree);
3768 static unsigned int dbx_reg_number (const_rtx);
3769 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3770 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3771 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3772 enum var_init_status);
3773 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3774 enum var_init_status);
3775 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3776 enum var_init_status);
3777 static int is_based_loc (const_rtx);
3778 static bool resolve_one_addr (rtx *);
3779 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3780 enum var_init_status);
3781 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3782 enum var_init_status);
3783 struct loc_descr_context;
3784 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3785 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3786 static dw_loc_list_ref loc_list_from_tree (tree, int,
3787 struct loc_descr_context *);
3788 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3789 struct loc_descr_context *);
3790 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3791 static tree field_type (const_tree);
3792 static unsigned int simple_type_align_in_bits (const_tree);
3793 static unsigned int simple_decl_align_in_bits (const_tree);
3794 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3795 struct vlr_context;
3796 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3797 HOST_WIDE_INT *);
3798 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3799 dw_loc_list_ref);
3800 static void add_data_member_location_attribute (dw_die_ref, tree,
3801 struct vlr_context *);
3802 static bool add_const_value_attribute (dw_die_ref, rtx);
3803 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3804 static void insert_wide_int (const wide_int &, unsigned char *, int);
3805 static void insert_float (const_rtx, unsigned char *);
3806 static rtx rtl_for_decl_location (tree);
3807 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3808 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3809 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3810 static void add_name_attribute (dw_die_ref, const char *);
3811 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3812 static void add_comp_dir_attribute (dw_die_ref);
3813 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3814 struct loc_descr_context *);
3815 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3816 struct loc_descr_context *);
3817 static void add_subscript_info (dw_die_ref, tree, bool);
3818 static void add_byte_size_attribute (dw_die_ref, tree);
3819 static void add_alignment_attribute (dw_die_ref, tree);
3820 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3821 struct vlr_context *);
3822 static void add_bit_size_attribute (dw_die_ref, tree);
3823 static void add_prototyped_attribute (dw_die_ref, tree);
3824 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3825 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3826 static void add_src_coords_attributes (dw_die_ref, tree);
3827 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3828 static void add_discr_value (dw_die_ref, dw_discr_value *);
3829 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3830 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3831 static dw_die_ref scope_die_for (tree, dw_die_ref);
3832 static inline int local_scope_p (dw_die_ref);
3833 static inline int class_scope_p (dw_die_ref);
3834 static inline int class_or_namespace_scope_p (dw_die_ref);
3835 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3836 static void add_calling_convention_attribute (dw_die_ref, tree);
3837 static const char *type_tag (const_tree);
3838 static tree member_declared_type (const_tree);
3839 #if 0
3840 static const char *decl_start_label (tree);
3841 #endif
3842 static void gen_array_type_die (tree, dw_die_ref);
3843 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3844 #if 0
3845 static void gen_entry_point_die (tree, dw_die_ref);
3846 #endif
3847 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3848 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3849 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3850 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3851 static void gen_formal_types_die (tree, dw_die_ref);
3852 static void gen_subprogram_die (tree, dw_die_ref);
3853 static void gen_variable_die (tree, tree, dw_die_ref);
3854 static void gen_const_die (tree, dw_die_ref);
3855 static void gen_label_die (tree, dw_die_ref);
3856 static void gen_lexical_block_die (tree, dw_die_ref);
3857 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3858 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3859 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3860 static dw_die_ref gen_compile_unit_die (const char *);
3861 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3862 static void gen_member_die (tree, dw_die_ref);
3863 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3864 enum debug_info_usage);
3865 static void gen_subroutine_type_die (tree, dw_die_ref);
3866 static void gen_typedef_die (tree, dw_die_ref);
3867 static void gen_type_die (tree, dw_die_ref);
3868 static void gen_block_die (tree, dw_die_ref);
3869 static void decls_for_scope (tree, dw_die_ref);
3870 static bool is_naming_typedef_decl (const_tree);
3871 static inline dw_die_ref get_context_die (tree);
3872 static void gen_namespace_die (tree, dw_die_ref);
3873 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3874 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3875 static dw_die_ref force_decl_die (tree);
3876 static dw_die_ref force_type_die (tree);
3877 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3878 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3879 static struct dwarf_file_data * lookup_filename (const char *);
3880 static void retry_incomplete_types (void);
3881 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3882 static void gen_generic_params_dies (tree);
3883 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3884 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3885 static void splice_child_die (dw_die_ref, dw_die_ref);
3886 static int file_info_cmp (const void *, const void *);
3887 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3888 const char *, var_loc_view, const char *);
3889 static void output_loc_list (dw_loc_list_ref);
3890 static char *gen_internal_sym (const char *);
3891 static bool want_pubnames (void);
3892
3893 static void prune_unmark_dies (dw_die_ref);
3894 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3895 static void prune_unused_types_mark (dw_die_ref, int);
3896 static void prune_unused_types_walk (dw_die_ref);
3897 static void prune_unused_types_walk_attribs (dw_die_ref);
3898 static void prune_unused_types_prune (dw_die_ref);
3899 static void prune_unused_types (void);
3900 static int maybe_emit_file (struct dwarf_file_data *fd);
3901 static inline const char *AT_vms_delta1 (dw_attr_node *);
3902 static inline const char *AT_vms_delta2 (dw_attr_node *);
3903 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3904 const char *, const char *);
3905 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3906 static void gen_remaining_tmpl_value_param_die_attribute (void);
3907 static bool generic_type_p (tree);
3908 static void schedule_generic_params_dies_gen (tree t);
3909 static void gen_scheduled_generic_parms_dies (void);
3910 static void resolve_variable_values (void);
3911
3912 static const char *comp_dir_string (void);
3913
3914 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3915
3916 /* enum for tracking thread-local variables whose address is really an offset
3917 relative to the TLS pointer, which will need link-time relocation, but will
3918 not need relocation by the DWARF consumer. */
3919
3920 enum dtprel_bool
3921 {
3922 dtprel_false = 0,
3923 dtprel_true = 1
3924 };
3925
3926 /* Return the operator to use for an address of a variable. For dtprel_true, we
3927 use DW_OP_const*. For regular variables, which need both link-time
3928 relocation and consumer-level relocation (e.g., to account for shared objects
3929 loaded at a random address), we use DW_OP_addr*. */
3930
3931 static inline enum dwarf_location_atom
3932 dw_addr_op (enum dtprel_bool dtprel)
3933 {
3934 if (dtprel == dtprel_true)
3935 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3936 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3937 else
3938 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3939 }
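
/* Concretely (following the code above): with -gsplit-dwarf a TLS offset
   is emitted via dwarf_OP (DW_OP_constx) and a regular address via
   dwarf_OP (DW_OP_addrx); without split debug info a TLS offset becomes
   DW_OP_const4u or DW_OP_const8u depending on DWARF2_ADDR_SIZE, and a
   regular address becomes plain DW_OP_addr. */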
3940
3941 /* Return a pointer to a newly allocated address location description. If
3942 dwarf_split_debug_info is true, then record the address with the appropriate
3943 relocation. */
3944 static inline dw_loc_descr_ref
3945 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3946 {
3947 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3948
3949 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3950 ref->dw_loc_oprnd1.v.val_addr = addr;
3951 ref->dtprel = dtprel;
3952 if (dwarf_split_debug_info)
3953 ref->dw_loc_oprnd1.val_entry
3954 = add_addr_table_entry (addr,
3955 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3956 else
3957 ref->dw_loc_oprnd1.val_entry = NULL;
3958
3959 return ref;
3960 }
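
/* Illustrative usage (an editor's sketch, not a call site copied from this
   file): to describe a variable located at the address of the SYMBOL_REF
   rtx SYM, one could build

     dw_loc_descr_ref descr = new_addr_loc_descr (SYM, dtprel_false);
     add_AT_loc (die, DW_AT_location, descr);

   whereas a thread-local variable whose "address" is really an offset
   from the TLS block would pass dtprel_true, so that dw_addr_op picks a
   constant-class opcode (DW_OP_const4u/8u or DW_OP_constx) instead of an
   address-class one. */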
3961
3962 /* Section names used to hold DWARF debugging information. */
3963
3964 #ifndef DEBUG_INFO_SECTION
3965 #define DEBUG_INFO_SECTION ".debug_info"
3966 #endif
3967 #ifndef DEBUG_DWO_INFO_SECTION
3968 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3969 #endif
3970 #ifndef DEBUG_LTO_INFO_SECTION
3971 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3972 #endif
3973 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3974 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3975 #endif
3976 #ifndef DEBUG_ABBREV_SECTION
3977 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3978 #endif
3979 #ifndef DEBUG_LTO_ABBREV_SECTION
3980 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3981 #endif
3982 #ifndef DEBUG_DWO_ABBREV_SECTION
3983 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3984 #endif
3985 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3986 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3987 #endif
3988 #ifndef DEBUG_ARANGES_SECTION
3989 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3990 #endif
3991 #ifndef DEBUG_ADDR_SECTION
3992 #define DEBUG_ADDR_SECTION ".debug_addr"
3993 #endif
3994 #ifndef DEBUG_MACINFO_SECTION
3995 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3996 #endif
3997 #ifndef DEBUG_LTO_MACINFO_SECTION
3998 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3999 #endif
4000 #ifndef DEBUG_DWO_MACINFO_SECTION
4001 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4002 #endif
4003 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4004 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4005 #endif
4006 #ifndef DEBUG_MACRO_SECTION
4007 #define DEBUG_MACRO_SECTION ".debug_macro"
4008 #endif
4009 #ifndef DEBUG_LTO_MACRO_SECTION
4010 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4011 #endif
4012 #ifndef DEBUG_DWO_MACRO_SECTION
4013 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4014 #endif
4015 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4016 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4017 #endif
4018 #ifndef DEBUG_LINE_SECTION
4019 #define DEBUG_LINE_SECTION ".debug_line"
4020 #endif
4021 #ifndef DEBUG_LTO_LINE_SECTION
4022 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4023 #endif
4024 #ifndef DEBUG_DWO_LINE_SECTION
4025 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4026 #endif
4027 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4028 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4029 #endif
4030 #ifndef DEBUG_LOC_SECTION
4031 #define DEBUG_LOC_SECTION ".debug_loc"
4032 #endif
4033 #ifndef DEBUG_DWO_LOC_SECTION
4034 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4035 #endif
4036 #ifndef DEBUG_LOCLISTS_SECTION
4037 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4038 #endif
4039 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4040 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4041 #endif
4042 #ifndef DEBUG_PUBNAMES_SECTION
4043 #define DEBUG_PUBNAMES_SECTION \
4044 ((debug_generate_pub_sections == 2) \
4045 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4046 #endif
4047 #ifndef DEBUG_PUBTYPES_SECTION
4048 #define DEBUG_PUBTYPES_SECTION \
4049 ((debug_generate_pub_sections == 2) \
4050 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4051 #endif
4052 #ifndef DEBUG_STR_OFFSETS_SECTION
4053 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4054 #endif
4055 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4056 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4057 #endif
4058 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4059 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4060 #endif
4061 #ifndef DEBUG_STR_SECTION
4062 #define DEBUG_STR_SECTION ".debug_str"
4063 #endif
4064 #ifndef DEBUG_LTO_STR_SECTION
4065 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4066 #endif
4067 #ifndef DEBUG_STR_DWO_SECTION
4068 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4069 #endif
4070 #ifndef DEBUG_LTO_STR_DWO_SECTION
4071 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4072 #endif
4073 #ifndef DEBUG_RANGES_SECTION
4074 #define DEBUG_RANGES_SECTION ".debug_ranges"
4075 #endif
4076 #ifndef DEBUG_RNGLISTS_SECTION
4077 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4078 #endif
4079 #ifndef DEBUG_LINE_STR_SECTION
4080 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4081 #endif
4082 #ifndef DEBUG_LTO_LINE_STR_SECTION
4083 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4084 #endif
4085
4086 /* Standard ELF section names for compiled code and data. */
4087 #ifndef TEXT_SECTION_NAME
4088 #define TEXT_SECTION_NAME ".text"
4089 #endif
4090
4091 /* Section flags for .debug_str section. */
4092 #define DEBUG_STR_SECTION_FLAGS \
4093 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4094 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4095 : SECTION_DEBUG)
4096
4097 /* Section flags for .debug_str.dwo section. */
4098 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4099
4100 /* Attribute used to refer to the macro section. */
4101 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4102 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
4103
4104 /* Labels we insert at the beginning of sections so that we can refer to
4105 them instead of the section names themselves. */
4106
4107 #ifndef TEXT_SECTION_LABEL
4108 #define TEXT_SECTION_LABEL "Ltext"
4109 #endif
4110 #ifndef COLD_TEXT_SECTION_LABEL
4111 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4112 #endif
4113 #ifndef DEBUG_LINE_SECTION_LABEL
4114 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4115 #endif
4116 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4117 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4118 #endif
4119 #ifndef DEBUG_INFO_SECTION_LABEL
4120 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4121 #endif
4122 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4123 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4124 #endif
4125 #ifndef DEBUG_ABBREV_SECTION_LABEL
4126 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4127 #endif
4128 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4129 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4130 #endif
4131 #ifndef DEBUG_ADDR_SECTION_LABEL
4132 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4133 #endif
4134 #ifndef DEBUG_LOC_SECTION_LABEL
4135 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4136 #endif
4137 #ifndef DEBUG_RANGES_SECTION_LABEL
4138 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4139 #endif
4140 #ifndef DEBUG_MACINFO_SECTION_LABEL
4141 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4142 #endif
4143 #ifndef DEBUG_MACRO_SECTION_LABEL
4144 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4145 #endif
4146 #define SKELETON_COMP_DIE_ABBREV 1
4147 #define SKELETON_TYPE_DIE_ABBREV 2
4148
4149 /* Definitions of defaults for formats and names of various special
4150 (artificial) labels which may be generated within this file (when the -g
4151 option is used and DWARF2_DEBUGGING_INFO is in effect).
4152 If necessary, these may be overridden from within the tm.h file, but
4153 typically, overriding these defaults is unnecessary. */
4154
4155 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4156 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4157 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4158 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4159 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4160 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4161 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4162 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4163 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4170
4171 #ifndef TEXT_END_LABEL
4172 #define TEXT_END_LABEL "Letext"
4173 #endif
4174 #ifndef COLD_END_LABEL
4175 #define COLD_END_LABEL "Letext_cold"
4176 #endif
4177 #ifndef BLOCK_BEGIN_LABEL
4178 #define BLOCK_BEGIN_LABEL "LBB"
4179 #endif
4180 #ifndef BLOCK_INLINE_ENTRY_LABEL
4181 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4182 #endif
4183 #ifndef BLOCK_END_LABEL
4184 #define BLOCK_END_LABEL "LBE"
4185 #endif
4186 #ifndef LINE_CODE_LABEL
4187 #define LINE_CODE_LABEL "LM"
4188 #endif
4189
4190 \f
4191 /* Return the root of the DIEs built for the current compilation unit. */
4192 static dw_die_ref
4193 comp_unit_die (void)
4194 {
4195 if (!single_comp_unit_die)
4196 single_comp_unit_die = gen_compile_unit_die (NULL);
4197 return single_comp_unit_die;
4198 }
4199
4200 /* We allow a language front-end to designate a function that is to be
4201 called to "demangle" any name before it is put into a DIE. */
4202
4203 static const char *(*demangle_name_func) (const char *);
4204
4205 void
4206 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4207 {
4208 demangle_name_func = func;
4209 }
4210
4211 /* Test if rtl node points to a pseudo register. */
4212
4213 static inline int
4214 is_pseudo_reg (const_rtx rtl)
4215 {
4216 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4217 || (GET_CODE (rtl) == SUBREG
4218 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4219 }
4220
4221 /* Return a reference to a type, with its const and volatile qualifiers
4222 removed. */
4223
4224 static inline tree
4225 type_main_variant (tree type)
4226 {
4227 type = TYPE_MAIN_VARIANT (type);
4228
4229 /* ??? There really should be only one main variant among any group of
4230 variants of a given type (and all of the MAIN_VARIANT values for all
4231 members of the group should point to that one type) but sometimes the C
4232 front-end messes this up for array types, so we work around that bug
4233 here. */
4234 if (TREE_CODE (type) == ARRAY_TYPE)
4235 while (type != TYPE_MAIN_VARIANT (type))
4236 type = TYPE_MAIN_VARIANT (type);
4237
4238 return type;
4239 }
4240
4241 /* Return nonzero if the given type node represents a tagged type. */
4242
4243 static inline int
4244 is_tagged_type (const_tree type)
4245 {
4246 enum tree_code code = TREE_CODE (type);
4247
4248 return (code == RECORD_TYPE || code == UNION_TYPE
4249 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4250 }
4251
4252 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4253
4254 static void
4255 get_ref_die_offset_label (char *label, dw_die_ref ref)
4256 {
4257 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4258 }
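
/* For example (illustrative only): if debug_info_section_label reads
   "Ldebug_info0" and the referenced DIE sits at offset 42, LABEL becomes
   "Ldebug_info0+42"; the exact spelling of the section label depends on
   ASM_GENERATE_INTERNAL_LABEL for the target. */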
4259
4260 /* Return die_offset of a DIE reference to a base type. */
4261
4262 static unsigned long int
4263 get_base_type_offset (dw_die_ref ref)
4264 {
4265 if (ref->die_offset)
4266 return ref->die_offset;
4267 if (comp_unit_die ()->die_abbrev)
4268 {
4269 calc_base_type_die_sizes ();
4270 gcc_assert (ref->die_offset);
4271 }
4272 return ref->die_offset;
4273 }
4274
4275 /* Return die_offset of a DIE reference other than base type. */
4276
4277 static unsigned long int
4278 get_ref_die_offset (dw_die_ref ref)
4279 {
4280 gcc_assert (ref->die_offset);
4281 return ref->die_offset;
4282 }
4283
4284 /* Convert a DIE tag into its string name. */
4285
4286 static const char *
4287 dwarf_tag_name (unsigned int tag)
4288 {
4289 const char *name = get_DW_TAG_name (tag);
4290
4291 if (name != NULL)
4292 return name;
4293
4294 return "DW_TAG_<unknown>";
4295 }
4296
4297 /* Convert a DWARF attribute code into its string name. */
4298
4299 static const char *
4300 dwarf_attr_name (unsigned int attr)
4301 {
4302 const char *name;
4303
4304 switch (attr)
4305 {
4306 #if VMS_DEBUGGING_INFO
4307 case DW_AT_HP_prologue:
4308 return "DW_AT_HP_prologue";
4309 #else
4310 case DW_AT_MIPS_loop_unroll_factor:
4311 return "DW_AT_MIPS_loop_unroll_factor";
4312 #endif
4313
4314 #if VMS_DEBUGGING_INFO
4315 case DW_AT_HP_epilogue:
4316 return "DW_AT_HP_epilogue";
4317 #else
4318 case DW_AT_MIPS_stride:
4319 return "DW_AT_MIPS_stride";
4320 #endif
4321 }
4322
4323 name = get_DW_AT_name (attr);
4324
4325 if (name != NULL)
4326 return name;
4327
4328 return "DW_AT_<unknown>";
4329 }
4330
4331 /* Convert a DWARF value form code into its string name. */
4332
4333 static const char *
4334 dwarf_form_name (unsigned int form)
4335 {
4336 const char *name = get_DW_FORM_name (form);
4337
4338 if (name != NULL)
4339 return name;
4340
4341 return "DW_FORM_<unknown>";
4342 }
4343 \f
4344 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4345 instance of an inlined instance of a decl which is local to an inline
4346 function, so we have to trace all of the way back through the origin chain
4347 to find out what sort of node actually served as the original seed for the
4348 given block. */
4349
4350 static tree
4351 decl_ultimate_origin (const_tree decl)
4352 {
4353 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4354 return NULL_TREE;
4355
4356 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4357 we're trying to output the abstract instance of this function. */
4358 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4359 return NULL_TREE;
4360
4361 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4362 most distant ancestor, this should never happen. */
4363 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4364
4365 return DECL_ABSTRACT_ORIGIN (decl);
4366 }
4367
4368 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4369 of a virtual function may refer to a base class, so we check the 'this'
4370 parameter. */
4371
4372 static tree
4373 decl_class_context (tree decl)
4374 {
4375 tree context = NULL_TREE;
4376
4377 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4378 context = DECL_CONTEXT (decl);
4379 else
4380 context = TYPE_MAIN_VARIANT
4381 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4382
4383 if (context && !TYPE_P (context))
4384 context = NULL_TREE;
4385
4386 return context;
4387 }
4388 \f
4389 /* Add an attribute/value pair to a DIE. */
4390
4391 static inline void
4392 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4393 {
4394 /* Maybe this should be an assert? */
4395 if (die == NULL)
4396 return;
4397
4398 if (flag_checking)
4399 {
4400 /* Check we do not add duplicate attrs. Can't use get_AT here
4401 because that recurses to the specification/abstract origin DIE. */
4402 dw_attr_node *a;
4403 unsigned ix;
4404 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4405 gcc_assert (a->dw_attr != attr->dw_attr);
4406 }
4407
4408 vec_safe_reserve (die->die_attr, 1);
4409 vec_safe_push (die->die_attr, *attr);
4410 }
4411
4412 static inline enum dw_val_class
4413 AT_class (dw_attr_node *a)
4414 {
4415 return a->dw_attr_val.val_class;
4416 }
4417
4418 /* Return the index for any attribute that will be referenced with a
4419 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4420 indices are stored in dw_attr_val.v.val_str for reference counting
4421 pruning. */
4422
4423 static inline unsigned int
4424 AT_index (dw_attr_node *a)
4425 {
4426 if (AT_class (a) == dw_val_class_str)
4427 return a->dw_attr_val.v.val_str->index;
4428 else if (a->dw_attr_val.val_entry != NULL)
4429 return a->dw_attr_val.val_entry->index;
4430 return NOT_INDEXED;
4431 }
4432
4433 /* Add a flag value attribute to a DIE. */
4434
4435 static inline void
4436 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4437 {
4438 dw_attr_node attr;
4439
4440 attr.dw_attr = attr_kind;
4441 attr.dw_attr_val.val_class = dw_val_class_flag;
4442 attr.dw_attr_val.val_entry = NULL;
4443 attr.dw_attr_val.v.val_flag = flag;
4444 add_dwarf_attr (die, &attr);
4445 }
4446
4447 static inline unsigned
4448 AT_flag (dw_attr_node *a)
4449 {
4450 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4451 return a->dw_attr_val.v.val_flag;
4452 }
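
/* Illustrative usage (an editor's sketch): a DIE for an externally visible
   declaration would typically be marked with

     add_AT_flag (die, DW_AT_external, 1);

   and the flag can later be read back through get_AT_flag (die,
   DW_AT_external), which yields 0 when the attribute is absent. */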
4453
4454 /* Add a signed integer attribute value to a DIE. */
4455
4456 static inline void
4457 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4458 {
4459 dw_attr_node attr;
4460
4461 attr.dw_attr = attr_kind;
4462 attr.dw_attr_val.val_class = dw_val_class_const;
4463 attr.dw_attr_val.val_entry = NULL;
4464 attr.dw_attr_val.v.val_int = int_val;
4465 add_dwarf_attr (die, &attr);
4466 }
4467
4468 static inline HOST_WIDE_INT
4469 AT_int (dw_attr_node *a)
4470 {
4471 gcc_assert (a && (AT_class (a) == dw_val_class_const
4472 || AT_class (a) == dw_val_class_const_implicit));
4473 return a->dw_attr_val.v.val_int;
4474 }
4475
4476 /* Add an unsigned integer attribute value to a DIE. */
4477
4478 static inline void
4479 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4480 unsigned HOST_WIDE_INT unsigned_val)
4481 {
4482 dw_attr_node attr;
4483
4484 attr.dw_attr = attr_kind;
4485 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4486 attr.dw_attr_val.val_entry = NULL;
4487 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4488 add_dwarf_attr (die, &attr);
4489 }
4490
4491 static inline unsigned HOST_WIDE_INT
4492 AT_unsigned (dw_attr_node *a)
4493 {
4494 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4495 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4496 return a->dw_attr_val.v.val_unsigned;
4497 }
4498
4499 /* Add an unsigned wide integer attribute value to a DIE. */
4500
4501 static inline void
4502 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4503 const wide_int& w)
4504 {
4505 dw_attr_node attr;
4506
4507 attr.dw_attr = attr_kind;
4508 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4509 attr.dw_attr_val.val_entry = NULL;
4510 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4511 *attr.dw_attr_val.v.val_wide = w;
4512 add_dwarf_attr (die, &attr);
4513 }
4514
4515 /* Add an unsigned double integer attribute value to a DIE. */
4516
4517 static inline void
4518 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4519 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4520 {
4521 dw_attr_node attr;
4522
4523 attr.dw_attr = attr_kind;
4524 attr.dw_attr_val.val_class = dw_val_class_const_double;
4525 attr.dw_attr_val.val_entry = NULL;
4526 attr.dw_attr_val.v.val_double.high = high;
4527 attr.dw_attr_val.v.val_double.low = low;
4528 add_dwarf_attr (die, &attr);
4529 }
4530
4531 /* Add a vector (array of bytes) attribute value to a DIE. */
4532
4533 static inline void
4534 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4535 unsigned int length, unsigned int elt_size, unsigned char *array)
4536 {
4537 dw_attr_node attr;
4538
4539 attr.dw_attr = attr_kind;
4540 attr.dw_attr_val.val_class = dw_val_class_vec;
4541 attr.dw_attr_val.val_entry = NULL;
4542 attr.dw_attr_val.v.val_vec.length = length;
4543 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4544 attr.dw_attr_val.v.val_vec.array = array;
4545 add_dwarf_attr (die, &attr);
4546 }
4547
4548 /* Add an 8-byte data attribute value to a DIE. */
4549
4550 static inline void
4551 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4552 unsigned char data8[8])
4553 {
4554 dw_attr_node attr;
4555
4556 attr.dw_attr = attr_kind;
4557 attr.dw_attr_val.val_class = dw_val_class_data8;
4558 attr.dw_attr_val.val_entry = NULL;
4559 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4560 add_dwarf_attr (die, &attr);
4561 }
4562
4563 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4564 dwarf_split_debug_info, address attributes in dies destined for the
4565 final executable have force_direct set to avoid using indexed
4566 references. */
4567
4568 static inline void
4569 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4570 bool force_direct)
4571 {
4572 dw_attr_node attr;
4573 char * lbl_id;
4574
4575 lbl_id = xstrdup (lbl_low);
4576 attr.dw_attr = DW_AT_low_pc;
4577 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4578 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4579 if (dwarf_split_debug_info && !force_direct)
4580 attr.dw_attr_val.val_entry
4581 = add_addr_table_entry (lbl_id, ate_kind_label);
4582 else
4583 attr.dw_attr_val.val_entry = NULL;
4584 add_dwarf_attr (die, &attr);
4585
4586 attr.dw_attr = DW_AT_high_pc;
4587 if (dwarf_version < 4)
4588 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4589 else
4590 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4591 lbl_id = xstrdup (lbl_high);
4592 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4593 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4594 && dwarf_split_debug_info && !force_direct)
4595 attr.dw_attr_val.val_entry
4596 = add_addr_table_entry (lbl_id, ate_kind_label);
4597 else
4598 attr.dw_attr_val.val_entry = NULL;
4599 add_dwarf_attr (die, &attr);
4600 }
4601
4602 /* Hash and equality functions for debug_str_hash. */
4603
4604 hashval_t
4605 indirect_string_hasher::hash (indirect_string_node *x)
4606 {
4607 return htab_hash_string (x->str);
4608 }
4609
4610 bool
4611 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4612 {
4613 return strcmp (x1->str, x2) == 0;
4614 }
4615
4616 /* Find or add STR in the given string hash table and bump its refcount. */
4617
4618 static struct indirect_string_node *
4619 find_AT_string_in_table (const char *str,
4620 hash_table<indirect_string_hasher> *table)
4621 {
4622 struct indirect_string_node *node;
4623
4624 indirect_string_node **slot
4625 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4626 if (*slot == NULL)
4627 {
4628 node = ggc_cleared_alloc<indirect_string_node> ();
4629 node->str = ggc_strdup (str);
4630 *slot = node;
4631 }
4632 else
4633 node = *slot;
4634
4635 node->refcount++;
4636 return node;
4637 }
4638
4639 /* Find or add STR in the indirect string hash table. */
4640
4641 static struct indirect_string_node *
4642 find_AT_string (const char *str)
4643 {
4644 if (! debug_str_hash)
4645 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4646
4647 return find_AT_string_in_table (str, debug_str_hash);
4648 }
4649
4650 /* Add a string attribute value to a DIE. */
4651
4652 static inline void
4653 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4654 {
4655 dw_attr_node attr;
4656 struct indirect_string_node *node;
4657
4658 node = find_AT_string (str);
4659
4660 attr.dw_attr = attr_kind;
4661 attr.dw_attr_val.val_class = dw_val_class_str;
4662 attr.dw_attr_val.val_entry = NULL;
4663 attr.dw_attr_val.v.val_str = node;
4664 add_dwarf_attr (die, &attr);
4665 }
4666
4667 static inline const char *
4668 AT_string (dw_attr_node *a)
4669 {
4670 gcc_assert (a && AT_class (a) == dw_val_class_str);
4671 return a->dw_attr_val.v.val_str->str;
4672 }
4673
4674 /* Call this function directly to bypass AT_string_form's logic to put
4675 the string inline in the die. */
4676
4677 static void
4678 set_indirect_string (struct indirect_string_node *node)
4679 {
4680 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4681 /* Already indirect is a no op. */
4682 if (node->form == DW_FORM_strp
4683 || node->form == DW_FORM_line_strp
4684 || node->form == dwarf_FORM (DW_FORM_strx))
4685 {
4686 gcc_assert (node->label);
4687 return;
4688 }
4689 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4690 ++dw2_string_counter;
4691 node->label = xstrdup (label);
4692
4693 if (!dwarf_split_debug_info)
4694 {
4695 node->form = DW_FORM_strp;
4696 node->index = NOT_INDEXED;
4697 }
4698 else
4699 {
4700 node->form = dwarf_FORM (DW_FORM_strx);
4701 node->index = NO_INDEX_ASSIGNED;
4702 }
4703 }
4704
4705 /* A helper function for dwarf2out_finish, called to reset indirect
4706 string decisions done for early LTO dwarf output before fat object
4707 dwarf output. */
4708
4709 int
4710 reset_indirect_string (indirect_string_node **h, void *)
4711 {
4712 struct indirect_string_node *node = *h;
4713 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4714 {
4715 free (node->label);
4716 node->label = NULL;
4717 node->form = (dwarf_form) 0;
4718 node->index = 0;
4719 }
4720 return 1;
4721 }
4722
4723 /* Find out whether a string should be output inline in DIE
4724 or out-of-line in .debug_str section. */
4725
4726 static enum dwarf_form
4727 find_string_form (struct indirect_string_node *node)
4728 {
4729 unsigned int len;
4730
4731 if (node->form)
4732 return node->form;
4733
4734 len = strlen (node->str) + 1;
4735
4736 /* If the string is no longer than the size of the reference, it is
4737 always better to put it inline. */
4738 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4739 return node->form = DW_FORM_string;
4740
4741 /* If we cannot expect the linker to merge strings in .debug_str
4742 section, only put it into .debug_str if it is worthwhile even in
4743 this single module. */
4744 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4745 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4746 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4747 return node->form = DW_FORM_string;
4748
4749 set_indirect_string (node);
4750
4751 return node->form;
4752 }
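
/* A worked example of the trade-off above (illustrative, assuming
   DWARF_OFFSET_SIZE == 4 and no SECTION_MERGE support): a 10-byte string
   (including the terminating NUL) referenced 3 times costs 3 * 10 = 30
   bytes when emitted inline with DW_FORM_string, but only 10 + 3 * 4 = 22
   bytes when stored once in .debug_str and referenced with DW_FORM_strp,
   so it is emitted indirectly; a string of 4 bytes or fewer always stays
   inline, since a section offset would be at least as large. */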
4753
4754 /* Find out whether the string referenced from the attribute should be
4755 output inline in DIE or out-of-line in .debug_str section. */
4756
4757 static enum dwarf_form
4758 AT_string_form (dw_attr_node *a)
4759 {
4760 gcc_assert (a && AT_class (a) == dw_val_class_str);
4761 return find_string_form (a->dw_attr_val.v.val_str);
4762 }
4763
4764 /* Add a DIE reference attribute value to a DIE. */
4765
4766 static inline void
4767 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4768 {
4769 dw_attr_node attr;
4770 gcc_checking_assert (targ_die != NULL);
4771
4772 /* With LTO we can end up trying to reference something we didn't create
4773 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4774 if (targ_die == NULL)
4775 return;
4776
4777 attr.dw_attr = attr_kind;
4778 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4779 attr.dw_attr_val.val_entry = NULL;
4780 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4781 attr.dw_attr_val.v.val_die_ref.external = 0;
4782 add_dwarf_attr (die, &attr);
4783 }
4784
4785 /* Change DIE reference REF to point to NEW_DIE instead. */
4786
4787 static inline void
4788 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4789 {
4790 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4791 ref->dw_attr_val.v.val_die_ref.die = new_die;
4792 ref->dw_attr_val.v.val_die_ref.external = 0;
4793 }
4794
4795 /* Add an AT_specification attribute to a DIE, and also make the back
4796 pointer from the specification to the definition. */
4797
4798 static inline void
4799 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4800 {
4801 add_AT_die_ref (die, DW_AT_specification, targ_die);
4802 gcc_assert (!targ_die->die_definition);
4803 targ_die->die_definition = die;
4804 }
4805
4806 static inline dw_die_ref
4807 AT_ref (dw_attr_node *a)
4808 {
4809 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4810 return a->dw_attr_val.v.val_die_ref.die;
4811 }
4812
4813 static inline int
4814 AT_ref_external (dw_attr_node *a)
4815 {
4816 if (a && AT_class (a) == dw_val_class_die_ref)
4817 return a->dw_attr_val.v.val_die_ref.external;
4818
4819 return 0;
4820 }
4821
4822 static inline void
4823 set_AT_ref_external (dw_attr_node *a, int i)
4824 {
4825 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4826 a->dw_attr_val.v.val_die_ref.external = i;
4827 }
4828
4829 /* Add an FDE reference attribute value to a DIE. */
4830
4831 static inline void
4832 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4833 {
4834 dw_attr_node attr;
4835
4836 attr.dw_attr = attr_kind;
4837 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4838 attr.dw_attr_val.val_entry = NULL;
4839 attr.dw_attr_val.v.val_fde_index = targ_fde;
4840 add_dwarf_attr (die, &attr);
4841 }
4842
4843 /* Add a location description attribute value to a DIE. */
4844
4845 static inline void
4846 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4847 {
4848 dw_attr_node attr;
4849
4850 attr.dw_attr = attr_kind;
4851 attr.dw_attr_val.val_class = dw_val_class_loc;
4852 attr.dw_attr_val.val_entry = NULL;
4853 attr.dw_attr_val.v.val_loc = loc;
4854 add_dwarf_attr (die, &attr);
4855 }
4856
4857 static inline dw_loc_descr_ref
4858 AT_loc (dw_attr_node *a)
4859 {
4860 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4861 return a->dw_attr_val.v.val_loc;
4862 }
4863
4864 static inline void
4865 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4866 {
4867 dw_attr_node attr;
4868
4869 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4870 return;
4871
4872 attr.dw_attr = attr_kind;
4873 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4874 attr.dw_attr_val.val_entry = NULL;
4875 attr.dw_attr_val.v.val_loc_list = loc_list;
4876 add_dwarf_attr (die, &attr);
4877 have_location_lists = true;
4878 }
4879
4880 static inline dw_loc_list_ref
4881 AT_loc_list (dw_attr_node *a)
4882 {
4883 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4884 return a->dw_attr_val.v.val_loc_list;
4885 }
4886
4887 /* Add a view list attribute to DIE. It must have a DW_AT_location
4888 attribute, because the view list complements the location list. */
4889
4890 static inline void
4891 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4892 {
4893 dw_attr_node attr;
4894
4895 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4896 return;
4897
4898 attr.dw_attr = attr_kind;
4899 attr.dw_attr_val.val_class = dw_val_class_view_list;
4900 attr.dw_attr_val.val_entry = NULL;
4901 attr.dw_attr_val.v.val_view_list = die;
4902 add_dwarf_attr (die, &attr);
4903 gcc_checking_assert (get_AT (die, DW_AT_location));
4904 gcc_assert (have_location_lists);
4905 }
4906
4907 /* Return a pointer to the location list referenced by the attribute.
4908 If the named attribute is a view list, look up the corresponding
4909 DW_AT_location attribute and return its location list. */
4910
4911 static inline dw_loc_list_ref *
4912 AT_loc_list_ptr (dw_attr_node *a)
4913 {
4914 gcc_assert (a);
4915 switch (AT_class (a))
4916 {
4917 case dw_val_class_loc_list:
4918 return &a->dw_attr_val.v.val_loc_list;
4919 case dw_val_class_view_list:
4920 {
4921 dw_attr_node *l;
4922 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4923 if (!l)
4924 return NULL;
4925 gcc_checking_assert (l + 1 == a);
4926 return AT_loc_list_ptr (l);
4927 }
4928 default:
4929 gcc_unreachable ();
4930 }
4931 }
4932
4933 /* Return the location attribute value associated with a view list
4934 attribute value. */
4935
4936 static inline dw_val_node *
4937 view_list_to_loc_list_val_node (dw_val_node *val)
4938 {
4939 gcc_assert (val->val_class == dw_val_class_view_list);
4940 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4941 if (!loc)
4942 return NULL;
4943 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4944 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4945 return &loc->dw_attr_val;
4946 }
4947
4948 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4949 {
4950 static hashval_t hash (addr_table_entry *);
4951 static bool equal (addr_table_entry *, addr_table_entry *);
4952 };
4953
4954 /* Table of entries into the .debug_addr section. */
4955
4956 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4957
4958 /* Hash an addr_table_entry. */
4959
4960 hashval_t
4961 addr_hasher::hash (addr_table_entry *a)
4962 {
4963 inchash::hash hstate;
4964 switch (a->kind)
4965 {
4966 case ate_kind_rtx:
4967 hstate.add_int (0);
4968 break;
4969 case ate_kind_rtx_dtprel:
4970 hstate.add_int (1);
4971 break;
4972 case ate_kind_label:
4973 return htab_hash_string (a->addr.label);
4974 default:
4975 gcc_unreachable ();
4976 }
4977 inchash::add_rtx (a->addr.rtl, hstate);
4978 return hstate.end ();
4979 }
4980
4981 /* Determine equality for two addr_table_entries. */
4982
4983 bool
4984 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4985 {
4986 if (a1->kind != a2->kind)
4987 return 0;
4988 switch (a1->kind)
4989 {
4990 case ate_kind_rtx:
4991 case ate_kind_rtx_dtprel:
4992 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4993 case ate_kind_label:
4994 return strcmp (a1->addr.label, a2->addr.label) == 0;
4995 default:
4996 gcc_unreachable ();
4997 }
4998 }
4999
5000 /* Initialize an addr_table_entry. */
5001
5002 void
5003 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
5004 {
5005 e->kind = kind;
5006 switch (kind)
5007 {
5008 case ate_kind_rtx:
5009 case ate_kind_rtx_dtprel:
5010 e->addr.rtl = (rtx) addr;
5011 break;
5012 case ate_kind_label:
5013 e->addr.label = (char *) addr;
5014 break;
5015 }
5016 e->refcount = 0;
5017 e->index = NO_INDEX_ASSIGNED;
5018 }
5019
5020 /* Add an entry for ADDR of kind KIND to the address table. Defer setting an
5021 index until output time. */
5022
5023 static addr_table_entry *
5024 add_addr_table_entry (void *addr, enum ate_kind kind)
5025 {
5026 addr_table_entry *node;
5027 addr_table_entry finder;
5028
5029 gcc_assert (dwarf_split_debug_info);
5030 if (! addr_index_table)
5031 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5032 init_addr_table_entry (&finder, kind, addr);
5033 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5034
5035 if (*slot == HTAB_EMPTY_ENTRY)
5036 {
5037 node = ggc_cleared_alloc<addr_table_entry> ();
5038 init_addr_table_entry (node, kind, addr);
5039 *slot = node;
5040 }
5041 else
5042 node = *slot;
5043
5044 node->refcount++;
5045 return node;
5046 }
5047
5048 /* Remove an entry from the addr table by decrementing its refcount.
5049 Strictly, decrementing the refcount would be enough, but the
5050 assertion that the entry is actually in the table has found
5051 bugs. */
5052
5053 static void
5054 remove_addr_table_entry (addr_table_entry *entry)
5055 {
5056 gcc_assert (dwarf_split_debug_info && addr_index_table);
5057 /* After an index is assigned, the table is frozen. */
5058 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5059 entry->refcount--;
5060 }
5061
5062 /* Given a location list, remove all addresses it refers to from the
5063 address_table. */
5064
5065 static void
5066 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5067 {
5068 for (; descr; descr = descr->dw_loc_next)
5069 if (descr->dw_loc_oprnd1.val_entry != NULL)
5070 {
5071 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5072 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5073 }
5074 }
5075
5076 /* A helper function for dwarf2out_finish called through
5077 htab_traverse. Assign an addr_table_entry its index. All entries
5078 must be collected into the table when this function is called,
5079 because the indexing code relies on htab_traverse to traverse nodes
5080 in the same order for each run. */
5081
5082 int
5083 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5084 {
5085 addr_table_entry *node = *h;
5086
5087 /* Don't index unreferenced nodes. */
5088 if (node->refcount == 0)
5089 return 1;
5090
5091 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5092 node->index = *index;
5093 *index += 1;
5094
5095 return 1;
5096 }
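
/* Rough lifecycle of the .debug_addr index table under split DWARF:
   add_addr_table_entry creates (or finds) an entry and bumps its refcount
   while DIEs are generated, remove_addr_table_entry only drops the
   refcount, and at output time index_addr_table_entry hands out
   consecutive indices, skipping entries whose refcount dropped back to
   zero.  Once an index is assigned the table is considered frozen.  */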
5097
5098 /* Add an address constant attribute value to a DIE. When using
5099 dwarf_split_debug_info, address attributes in dies destined for the
5100 final executable should be direct references--setting the parameter
5101 force_direct ensures this behavior. */
5102
5103 static inline void
5104 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5105 bool force_direct)
5106 {
5107 dw_attr_node attr;
5108
5109 attr.dw_attr = attr_kind;
5110 attr.dw_attr_val.val_class = dw_val_class_addr;
5111 attr.dw_attr_val.v.val_addr = addr;
5112 if (dwarf_split_debug_info && !force_direct)
5113 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5114 else
5115 attr.dw_attr_val.val_entry = NULL;
5116 add_dwarf_attr (die, &attr);
5117 }
5118
5119 /* Get the RTX from an address DIE attribute.  */
5120
5121 static inline rtx
5122 AT_addr (dw_attr_node *a)
5123 {
5124 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5125 return a->dw_attr_val.v.val_addr;
5126 }
5127
5128 /* Add a file attribute value to a DIE. */
5129
5130 static inline void
5131 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5132 struct dwarf_file_data *fd)
5133 {
5134 dw_attr_node attr;
5135
5136 attr.dw_attr = attr_kind;
5137 attr.dw_attr_val.val_class = dw_val_class_file;
5138 attr.dw_attr_val.val_entry = NULL;
5139 attr.dw_attr_val.v.val_file = fd;
5140 add_dwarf_attr (die, &attr);
5141 }
5142
5143 /* Get the dwarf_file_data from a file DIE attribute. */
5144
5145 static inline struct dwarf_file_data *
5146 AT_file (dw_attr_node *a)
5147 {
5148 gcc_assert (a && (AT_class (a) == dw_val_class_file
5149 || AT_class (a) == dw_val_class_file_implicit));
5150 return a->dw_attr_val.v.val_file;
5151 }
5152
5153 /* Add a vms delta attribute value to a DIE. */
5154
5155 static inline void
5156 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5157 const char *lbl1, const char *lbl2)
5158 {
5159 dw_attr_node attr;
5160
5161 attr.dw_attr = attr_kind;
5162 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5163 attr.dw_attr_val.val_entry = NULL;
5164 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5165 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5166 add_dwarf_attr (die, &attr);
5167 }
5168
5169 /* Add a symbolic view identifier attribute value to a DIE. */
5170
5171 static inline void
5172 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5173 const char *view_label)
5174 {
5175 dw_attr_node attr;
5176
5177 attr.dw_attr = attr_kind;
5178 attr.dw_attr_val.val_class = dw_val_class_symview;
5179 attr.dw_attr_val.val_entry = NULL;
5180 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5181 add_dwarf_attr (die, &attr);
5182 }
5183
5184 /* Add a label identifier attribute value to a DIE. */
5185
5186 static inline void
5187 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5188 const char *lbl_id)
5189 {
5190 dw_attr_node attr;
5191
5192 attr.dw_attr = attr_kind;
5193 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5194 attr.dw_attr_val.val_entry = NULL;
5195 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5196 if (dwarf_split_debug_info)
5197 attr.dw_attr_val.val_entry
5198 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5199 ate_kind_label);
5200 add_dwarf_attr (die, &attr);
5201 }
5202
5203 /* Add a section offset attribute value to a DIE, an offset into the
5204 debug_line section. */
5205
5206 static inline void
5207 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5208 const char *label)
5209 {
5210 dw_attr_node attr;
5211
5212 attr.dw_attr = attr_kind;
5213 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5214 attr.dw_attr_val.val_entry = NULL;
5215 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5216 add_dwarf_attr (die, &attr);
5217 }
5218
5219 /* Add a section offset attribute value to a DIE, an offset into the
5220 debug_loclists section. */
5221
5222 static inline void
5223 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5224 const char *label)
5225 {
5226 dw_attr_node attr;
5227
5228 attr.dw_attr = attr_kind;
5229 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5230 attr.dw_attr_val.val_entry = NULL;
5231 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5232 add_dwarf_attr (die, &attr);
5233 }
5234
5235 /* Add a section offset attribute value to a DIE, an offset into the
5236 debug_macinfo section. */
5237
5238 static inline void
5239 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5240 const char *label)
5241 {
5242 dw_attr_node attr;
5243
5244 attr.dw_attr = attr_kind;
5245 attr.dw_attr_val.val_class = dw_val_class_macptr;
5246 attr.dw_attr_val.val_entry = NULL;
5247 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5248 add_dwarf_attr (die, &attr);
5249 }
5250
5251 /* Add an offset attribute value to a DIE. */
5252
5253 static inline void
5254 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5255 unsigned HOST_WIDE_INT offset)
5256 {
5257 dw_attr_node attr;
5258
5259 attr.dw_attr = attr_kind;
5260 attr.dw_attr_val.val_class = dw_val_class_offset;
5261 attr.dw_attr_val.val_entry = NULL;
5262 attr.dw_attr_val.v.val_offset = offset;
5263 add_dwarf_attr (die, &attr);
5264 }
5265
5266 /* Add a range_list attribute value to a DIE. When using
5267 dwarf_split_debug_info, address attributes in dies destined for the
5268 final executable should be direct references--setting the parameter
5269 force_direct ensures this behavior. */
5270
5271 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5272 #define RELOCATED_OFFSET (NULL)
5273
5274 static void
5275 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5276 long unsigned int offset, bool force_direct)
5277 {
5278 dw_attr_node attr;
5279
5280 attr.dw_attr = attr_kind;
5281 attr.dw_attr_val.val_class = dw_val_class_range_list;
5282 /* For the range_list attribute, use val_entry to store whether the
5283 offset should follow split-debug-info or normal semantics. This
5284 value is read in output_range_list_offset. */
5285 if (dwarf_split_debug_info && !force_direct)
5286 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5287 else
5288 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5289 attr.dw_attr_val.v.val_offset = offset;
5290 add_dwarf_attr (die, &attr);
5291 }
5292
5293 /* Return the start label of a delta attribute. */
5294
5295 static inline const char *
5296 AT_vms_delta1 (dw_attr_node *a)
5297 {
5298 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5299 return a->dw_attr_val.v.val_vms_delta.lbl1;
5300 }
5301
5302 /* Return the end label of a delta attribute. */
5303
5304 static inline const char *
5305 AT_vms_delta2 (dw_attr_node *a)
5306 {
5307 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5308 return a->dw_attr_val.v.val_vms_delta.lbl2;
5309 }
5310
5311 static inline const char *
5312 AT_lbl (dw_attr_node *a)
5313 {
5314 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5315 || AT_class (a) == dw_val_class_lineptr
5316 || AT_class (a) == dw_val_class_macptr
5317 || AT_class (a) == dw_val_class_loclistsptr
5318 || AT_class (a) == dw_val_class_high_pc));
5319 return a->dw_attr_val.v.val_lbl_id;
5320 }
5321
5322 /* Get the attribute of type attr_kind. */
5323
5324 static dw_attr_node *
5325 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5326 {
5327 dw_attr_node *a;
5328 unsigned ix;
5329 dw_die_ref spec = NULL;
5330
5331 if (! die)
5332 return NULL;
5333
5334 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5335 if (a->dw_attr == attr_kind)
5336 return a;
5337 else if (a->dw_attr == DW_AT_specification
5338 || a->dw_attr == DW_AT_abstract_origin)
5339 spec = AT_ref (a);
5340
5341 if (spec)
5342 return get_AT (spec, attr_kind);
5343
5344 return NULL;
5345 }
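
/* Note that the lookup above follows DW_AT_specification and
   DW_AT_abstract_origin links, so querying e.g. DW_AT_name on a DIE for
   an out-of-class definition can transparently pick the attribute up from
   the in-class declaration DIE it refers to.  */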
5346
5347 /* Returns the parent of the declaration of DIE. */
5348
5349 static dw_die_ref
5350 get_die_parent (dw_die_ref die)
5351 {
5352 dw_die_ref t;
5353
5354 if (!die)
5355 return NULL;
5356
5357 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5358 || (t = get_AT_ref (die, DW_AT_specification)))
5359 die = t;
5360
5361 return die->die_parent;
5362 }
5363
5364 /* Return the "low pc" attribute value, typically associated with a subprogram
5365 DIE. Return null if the "low pc" attribute is either not present, or if it
5366 cannot be represented as an assembler label identifier. */
5367
5368 static inline const char *
5369 get_AT_low_pc (dw_die_ref die)
5370 {
5371 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5372
5373 return a ? AT_lbl (a) : NULL;
5374 }
5375
5376 /* Return the "high pc" attribute value, typically associated with a subprogram
5377 DIE. Return null if the "high pc" attribute is either not present, or if it
5378 cannot be represented as an assembler label identifier. */
5379
5380 static inline const char *
5381 get_AT_hi_pc (dw_die_ref die)
5382 {
5383 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5384
5385 return a ? AT_lbl (a) : NULL;
5386 }
5387
5388 /* Return the value of the string attribute designated by ATTR_KIND, or
5389 NULL if it is not present. */
5390
5391 static inline const char *
5392 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5393 {
5394 dw_attr_node *a = get_AT (die, attr_kind);
5395
5396 return a ? AT_string (a) : NULL;
5397 }
5398
5399 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5400    if it is not present.  */
5401
5402 static inline int
5403 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5404 {
5405 dw_attr_node *a = get_AT (die, attr_kind);
5406
5407 return a ? AT_flag (a) : 0;
5408 }
5409
5410 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5411 if it is not present. */
5412
5413 static inline unsigned
5414 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5415 {
5416 dw_attr_node *a = get_AT (die, attr_kind);
5417
5418 return a ? AT_unsigned (a) : 0;
5419 }
5420
5421 static inline dw_die_ref
5422 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5423 {
5424 dw_attr_node *a = get_AT (die, attr_kind);
5425
5426 return a ? AT_ref (a) : NULL;
5427 }
5428
5429 static inline struct dwarf_file_data *
5430 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5431 {
5432 dw_attr_node *a = get_AT (die, attr_kind);
5433
5434 return a ? AT_file (a) : NULL;
5435 }
5436
5437 /* Return TRUE if the language is C++. */
5438
5439 static inline bool
5440 is_cxx (void)
5441 {
5442 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5443
5444 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5445 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5446 }
5447
5448 /* Return TRUE if DECL was created by the C++ frontend. */
5449
5450 static bool
5451 is_cxx (const_tree decl)
5452 {
5453 if (in_lto_p)
5454 {
5455 const_tree context = get_ultimate_context (decl);
5456 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5457 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5458 }
5459 return is_cxx ();
5460 }
5461
5462 /* Return TRUE if the language is Fortran. */
5463
5464 static inline bool
5465 is_fortran (void)
5466 {
5467 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5468
5469 return (lang == DW_LANG_Fortran77
5470 || lang == DW_LANG_Fortran90
5471 || lang == DW_LANG_Fortran95
5472 || lang == DW_LANG_Fortran03
5473 || lang == DW_LANG_Fortran08);
5474 }
5475
5476 static inline bool
5477 is_fortran (const_tree decl)
5478 {
5479 if (in_lto_p)
5480 {
5481 const_tree context = get_ultimate_context (decl);
5482 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5483 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5484 "GNU Fortran", 11) == 0
5485 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5486 "GNU F77") == 0);
5487 }
5488 return is_fortran ();
5489 }
5490
5491 /* Return TRUE if the language is Ada. */
5492
5493 static inline bool
5494 is_ada (void)
5495 {
5496 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5497
5498 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5499 }
5500
5501 /* Remove the specified attribute if present. Return TRUE if removal
5502 was successful. */
5503
5504 static bool
5505 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5506 {
5507 dw_attr_node *a;
5508 unsigned ix;
5509
5510 if (! die)
5511 return false;
5512
5513 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5514 if (a->dw_attr == attr_kind)
5515 {
5516 if (AT_class (a) == dw_val_class_str)
5517 if (a->dw_attr_val.v.val_str->refcount)
5518 a->dw_attr_val.v.val_str->refcount--;
5519
5520 /* vec::ordered_remove should help reduce the number of abbrevs
5521 that are needed. */
5522 die->die_attr->ordered_remove (ix);
5523 return true;
5524 }
5525 return false;
5526 }
5527
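/* The DIE child/sibling helpers below share one representation: the
   children of a DIE form a circular, singly linked list through die_sib,
   and die_parent->die_child points to the LAST child, so
   die_child->die_sib is the first child and is where iteration over
   children starts.  */
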
5528 /* Remove CHILD from its parent. PREV must have the property that
5529 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5530
5531 static void
5532 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5533 {
5534 gcc_assert (child->die_parent == prev->die_parent);
5535 gcc_assert (prev->die_sib == child);
5536 if (prev == child)
5537 {
5538 gcc_assert (child->die_parent->die_child == child);
5539 prev = NULL;
5540 }
5541 else
5542 prev->die_sib = child->die_sib;
5543 if (child->die_parent->die_child == child)
5544 child->die_parent->die_child = prev;
5545 child->die_sib = NULL;
5546 }
5547
5548 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5549 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5550
5551 static void
5552 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5553 {
5554 dw_die_ref parent = old_child->die_parent;
5555
5556 gcc_assert (parent == prev->die_parent);
5557 gcc_assert (prev->die_sib == old_child);
5558
5559 new_child->die_parent = parent;
5560 if (prev == old_child)
5561 {
5562 gcc_assert (parent->die_child == old_child);
5563 new_child->die_sib = new_child;
5564 }
5565 else
5566 {
5567 prev->die_sib = new_child;
5568 new_child->die_sib = old_child->die_sib;
5569 }
5570 if (old_child->die_parent->die_child == old_child)
5571 old_child->die_parent->die_child = new_child;
5572 old_child->die_sib = NULL;
5573 }
5574
5575 /* Move all children from OLD_PARENT to NEW_PARENT. */
5576
5577 static void
5578 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5579 {
5580 dw_die_ref c;
5581 new_parent->die_child = old_parent->die_child;
5582 old_parent->die_child = NULL;
5583 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5584 }
5585
5586 /* Remove all children of DIE whose die_tag is TAG.  Do nothing if no
5587    child matches TAG.  */
5588
5589 static void
5590 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5591 {
5592 dw_die_ref c;
5593
5594 c = die->die_child;
5595 if (c) do {
5596 dw_die_ref prev = c;
5597 c = c->die_sib;
5598 while (c->die_tag == tag)
5599 {
5600 remove_child_with_prev (c, prev);
5601 c->die_parent = NULL;
5602 /* Might have removed every child. */
5603 if (die->die_child == NULL)
5604 return;
5605 c = prev->die_sib;
5606 }
5607 } while (c != die->die_child);
5608 }
5609
5610 /* Add a CHILD_DIE as the last child of DIE. */
5611
5612 static void
5613 add_child_die (dw_die_ref die, dw_die_ref child_die)
5614 {
5615 /* FIXME this should probably be an assert. */
5616 if (! die || ! child_die)
5617 return;
5618 gcc_assert (die != child_die);
5619
5620 child_die->die_parent = die;
5621 if (die->die_child)
5622 {
5623 child_die->die_sib = die->die_child->die_sib;
5624 die->die_child->die_sib = child_die;
5625 }
5626 else
5627 child_die->die_sib = child_die;
5628 die->die_child = child_die;
5629 }
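
/* For illustration: adding children A, B and then C to DIE with
   add_child_die leaves die->die_child == C, C->die_sib == A (the first
   child), A->die_sib == B and B->die_sib == C, so the newest child is
   reachable directly and insertion order is recovered by following
   die_sib from die_child->die_sib.  */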
5630
5631 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5632
5633 static void
5634 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5635 dw_die_ref after_die)
5636 {
5637 gcc_assert (die
5638 && child_die
5639 && after_die
5640 && die->die_child
5641 && die != child_die);
5642
5643 child_die->die_parent = die;
5644 child_die->die_sib = after_die->die_sib;
5645 after_die->die_sib = child_die;
5646 if (die->die_child == after_die)
5647 die->die_child = child_die;
5648 }
5649
5650 /* Unassociate CHILD from its parent, and make its parent be
5651 NEW_PARENT. */
5652
5653 static void
5654 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5655 {
5656 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5657 if (p->die_sib == child)
5658 {
5659 remove_child_with_prev (child, p);
5660 break;
5661 }
5662 add_child_die (new_parent, child);
5663 }
5664
5665 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5666 is the specification, to the end of PARENT's list of children.
5667 This is done by removing and re-adding it. */
5668
5669 static void
5670 splice_child_die (dw_die_ref parent, dw_die_ref child)
5671 {
5672 /* We want the declaration DIE from inside the class, not the
5673 specification DIE at toplevel. */
5674 if (child->die_parent != parent)
5675 {
5676 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5677
5678 if (tmp)
5679 child = tmp;
5680 }
5681
5682 gcc_assert (child->die_parent == parent
5683 || (child->die_parent
5684 == get_AT_ref (parent, DW_AT_specification)));
5685
5686 reparent_child (child, parent);
5687 }
5688
5689 /* Create and return a new die with TAG_VALUE as tag. */
5690
5691 static inline dw_die_ref
5692 new_die_raw (enum dwarf_tag tag_value)
5693 {
5694 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5695 die->die_tag = tag_value;
5696 return die;
5697 }
5698
5699 /* Create and return a new die with a parent of PARENT_DIE. If
5700 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5701 associated tree T must be supplied to determine parenthood
5702 later. */
5703
5704 static inline dw_die_ref
5705 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5706 {
5707 dw_die_ref die = new_die_raw (tag_value);
5708
5709 if (parent_die != NULL)
5710 add_child_die (parent_die, die);
5711 else
5712 {
5713 limbo_die_node *limbo_node;
5714
5715 /* No DIEs created after early dwarf should end up in limbo,
5716 because the limbo list should not persist past LTO
5717 streaming. */
5718 if (tag_value != DW_TAG_compile_unit
5719 /* These are allowed because they're generated while
5720 breaking out COMDAT units late. */
5721 && tag_value != DW_TAG_type_unit
5722 && tag_value != DW_TAG_skeleton_unit
5723 && !early_dwarf
5724 /* Allow nested functions to live in limbo because they will
5725 only temporarily live there, as decls_for_scope will fix
5726 them up. */
5727 && (TREE_CODE (t) != FUNCTION_DECL
5728 || !decl_function_context (t))
5729 /* Same as nested functions above but for types. Types that
5730 are local to a function will be fixed in
5731 decls_for_scope. */
5732 && (!RECORD_OR_UNION_TYPE_P (t)
5733 || !TYPE_CONTEXT (t)
5734 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5735 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5736 especially in the ltrans stage, but once we implement LTO
5737 dwarf streaming, we should remove this exception. */
5738 && !in_lto_p)
5739 {
5740 fprintf (stderr, "symbol ended up in limbo too late:");
5741 debug_generic_stmt (t);
5742 gcc_unreachable ();
5743 }
5744
5745 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5746 limbo_node->die = die;
5747 limbo_node->created_for = t;
5748 limbo_node->next = limbo_die_list;
5749 limbo_die_list = limbo_node;
5750 }
5751
5752 return die;
5753 }
5754
5755 /* Return the DIE associated with the given type specifier. */
5756
5757 static inline dw_die_ref
5758 lookup_type_die (tree type)
5759 {
5760 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5761 if (die && die->removed)
5762 {
5763 TYPE_SYMTAB_DIE (type) = NULL;
5764 return NULL;
5765 }
5766 return die;
5767 }
5768
5769 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5770    anonymous type named by the typedef TYPE_DIE, return the DIE of the
5771    anonymous type instead of the one for the naming typedef.  */
5772
5773 static inline dw_die_ref
5774 strip_naming_typedef (tree type, dw_die_ref type_die)
5775 {
5776 if (type
5777 && TREE_CODE (type) == RECORD_TYPE
5778 && type_die
5779 && type_die->die_tag == DW_TAG_typedef
5780 && is_naming_typedef_decl (TYPE_NAME (type)))
5781 type_die = get_AT_ref (type_die, DW_AT_type);
5782 return type_die;
5783 }
5784
5785 /* Like lookup_type_die, but if TYPE is an anonymous type named by a
5786    typedef[1], return the DIE of the anonymous type instead of the one
5787    for the naming typedef.  This is because in gen_typedef_die, we
5788    equated the anonymous struct named by the typedef with the DIE of
5789    the naming typedef.  So by default, lookup_type_die on an anonymous
5790    struct yields the DIE of the naming typedef.
5791
5792 [1]: Read the comment of is_naming_typedef_decl to learn about what
5793 a naming typedef is. */
5794
5795 static inline dw_die_ref
5796 lookup_type_die_strip_naming_typedef (tree type)
5797 {
5798 dw_die_ref die = lookup_type_die (type);
5799 return strip_naming_typedef (type, die);
5800 }
5801
5802 /* Equate a DIE to a given type specifier. */
5803
5804 static inline void
5805 equate_type_number_to_die (tree type, dw_die_ref type_die)
5806 {
5807 TYPE_SYMTAB_DIE (type) = type_die;
5808 }
5809
5810 /* Returns a hash value for X (which really is a die_struct). */
5811
5812 inline hashval_t
5813 decl_die_hasher::hash (die_node *x)
5814 {
5815 return (hashval_t) x->decl_id;
5816 }
5817
5818 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5819
5820 inline bool
5821 decl_die_hasher::equal (die_node *x, tree y)
5822 {
5823 return (x->decl_id == DECL_UID (y));
5824 }
5825
5826 /* Return the DIE associated with a given declaration. */
5827
5828 static inline dw_die_ref
5829 lookup_decl_die (tree decl)
5830 {
5831 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5832 NO_INSERT);
5833 if (!die)
5834 return NULL;
5835 if ((*die)->removed)
5836 {
5837 decl_die_table->clear_slot (die);
5838 return NULL;
5839 }
5840 return *die;
5841 }
5842
5843
5844 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5845    style reference.  Return true if we found one referring to a DIE for
5846    DECL, otherwise return false.  */
5847
5848 static bool
5849 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5850 unsigned HOST_WIDE_INT *off)
5851 {
5852 dw_die_ref die;
5853
5854 if (in_lto_p && !decl_die_table)
5855 return false;
5856
5857 if (TREE_CODE (decl) == BLOCK)
5858 die = BLOCK_DIE (decl);
5859 else
5860 die = lookup_decl_die (decl);
5861 if (!die)
5862 return false;
5863
5864 /* During WPA stage and incremental linking we currently use DIEs
5865 to store the decl <-> label + offset map. That's quite inefficient
5866 but it works for now. */
5867 if (in_lto_p)
5868 {
5869 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5870 if (!ref)
5871 {
5872 gcc_assert (die == comp_unit_die ());
5873 return false;
5874 }
5875 *off = ref->die_offset;
5876 *sym = ref->die_id.die_symbol;
5877 return true;
5878 }
5879
5880 /* Similar to get_ref_die_offset_label, but using the "correct"
5881 label. */
5882 *off = die->die_offset;
5883 while (die->die_parent)
5884 die = die->die_parent;
5885 /* For the containing CU DIE we compute a die_symbol in
5886 compute_comp_unit_symbol. */
5887 gcc_assert (die->die_tag == DW_TAG_compile_unit
5888 && die->die_id.die_symbol != NULL);
5889 *sym = die->die_id.die_symbol;
5890 return true;
5891 }
5892
5893 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5894
5895 static void
5896 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5897 const char *symbol, HOST_WIDE_INT offset)
5898 {
5899 /* Create a fake DIE that contains the reference. Don't use
5900 new_die because we don't want to end up in the limbo list. */
5901 dw_die_ref ref = new_die_raw (die->die_tag);
5902 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5903 ref->die_offset = offset;
5904 ref->with_offset = 1;
5905 add_AT_die_ref (die, attr_kind, ref);
5906 }
5907
5908 /* Create a DIE for DECL if required and add a reference to a DIE
5909 at SYMBOL + OFFSET which contains attributes dumped early. */
5910
5911 static void
5912 dwarf2out_register_external_die (tree decl, const char *sym,
5913 unsigned HOST_WIDE_INT off)
5914 {
5915 if (debug_info_level == DINFO_LEVEL_NONE)
5916 return;
5917
5918 if ((flag_wpa
5919 || flag_incremental_link == INCREMENTAL_LINK_LTO) && !decl_die_table)
5920 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5921
5922 dw_die_ref die
5923 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5924 gcc_assert (!die);
5925
5926 tree ctx;
5927 dw_die_ref parent = NULL;
5928   /* Need to look up a DIE for the decl's context - the containing
5929      function or translation unit.  */
5930 if (TREE_CODE (decl) == BLOCK)
5931 {
5932 ctx = BLOCK_SUPERCONTEXT (decl);
5933       /* ??? We do not output DIEs for all scopes, so skip as
5934          many enclosing BLOCKs as needed.  */
5935 while (TREE_CODE (ctx) == BLOCK
5936 && !BLOCK_DIE (ctx))
5937 ctx = BLOCK_SUPERCONTEXT (ctx);
5938 }
5939 else
5940 ctx = DECL_CONTEXT (decl);
5941 /* Peel types in the context stack. */
5942 while (ctx && TYPE_P (ctx))
5943 ctx = TYPE_CONTEXT (ctx);
5944 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5945 if (debug_info_level <= DINFO_LEVEL_TERSE)
5946 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5947 ctx = DECL_CONTEXT (ctx);
5948 if (ctx)
5949 {
5950 if (TREE_CODE (ctx) == BLOCK)
5951 parent = BLOCK_DIE (ctx);
5952 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5953 /* Keep the 1:1 association during WPA. */
5954 && !flag_wpa
5955 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5956 /* Otherwise all late annotations go to the main CU which
5957 imports the original CUs. */
5958 parent = comp_unit_die ();
5959 else if (TREE_CODE (ctx) == FUNCTION_DECL
5960 && TREE_CODE (decl) != FUNCTION_DECL
5961 && TREE_CODE (decl) != PARM_DECL
5962 && TREE_CODE (decl) != RESULT_DECL
5963 && TREE_CODE (decl) != BLOCK)
5964 /* Leave function local entities parent determination to when
5965 we process scope vars. */
5966 ;
5967 else
5968 parent = lookup_decl_die (ctx);
5969 }
5970 else
5971 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5972 Handle this case gracefully by globalizing stuff. */
5973 parent = comp_unit_die ();
5974 /* Create a DIE "stub". */
5975 switch (TREE_CODE (decl))
5976 {
5977 case TRANSLATION_UNIT_DECL:
5978 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
5979 {
5980 die = comp_unit_die ();
5981 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5982 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5983 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5984 to create a DIE for the original CUs. */
5985 return;
5986 }
5987 /* Keep the 1:1 association during WPA. */
5988 die = new_die (DW_TAG_compile_unit, NULL, decl);
5989 break;
5990 case NAMESPACE_DECL:
5991 if (is_fortran (decl))
5992 die = new_die (DW_TAG_module, parent, decl);
5993 else
5994 die = new_die (DW_TAG_namespace, parent, decl);
5995 break;
5996 case FUNCTION_DECL:
5997 die = new_die (DW_TAG_subprogram, parent, decl);
5998 break;
5999 case VAR_DECL:
6000 die = new_die (DW_TAG_variable, parent, decl);
6001 break;
6002 case RESULT_DECL:
6003 die = new_die (DW_TAG_variable, parent, decl);
6004 break;
6005 case PARM_DECL:
6006 die = new_die (DW_TAG_formal_parameter, parent, decl);
6007 break;
6008 case CONST_DECL:
6009 die = new_die (DW_TAG_constant, parent, decl);
6010 break;
6011 case LABEL_DECL:
6012 die = new_die (DW_TAG_label, parent, decl);
6013 break;
6014 case BLOCK:
6015 die = new_die (DW_TAG_lexical_block, parent, decl);
6016 break;
6017 default:
6018 gcc_unreachable ();
6019 }
6020 if (TREE_CODE (decl) == BLOCK)
6021 BLOCK_DIE (decl) = die;
6022 else
6023 equate_decl_number_to_die (decl, die);
6024
6025   /* Add a reference to the DIE providing early debug at SYM + OFF.  */
6026 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6027 }
6028
6029 /* Returns a hash value for X (which really is a var_loc_list). */
6030
6031 inline hashval_t
6032 decl_loc_hasher::hash (var_loc_list *x)
6033 {
6034 return (hashval_t) x->decl_id;
6035 }
6036
6037 /* Return nonzero if decl_id of var_loc_list X is the same as
6038 UID of decl *Y. */
6039
6040 inline bool
6041 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6042 {
6043 return (x->decl_id == DECL_UID (y));
6044 }
6045
6046 /* Return the var_loc list associated with a given declaration. */
6047
6048 static inline var_loc_list *
6049 lookup_decl_loc (const_tree decl)
6050 {
6051 if (!decl_loc_table)
6052 return NULL;
6053 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6054 }
6055
6056 /* Returns a hash value for X (which really is a cached_dw_loc_list).  */
6057
6058 inline hashval_t
6059 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6060 {
6061 return (hashval_t) x->decl_id;
6062 }
6063
6064 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6065 UID of decl *Y. */
6066
6067 inline bool
6068 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6069 {
6070 return (x->decl_id == DECL_UID (y));
6071 }
6072
6073 /* Equate a DIE to a particular declaration. */
6074
6075 static void
6076 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6077 {
6078 unsigned int decl_id = DECL_UID (decl);
6079
6080 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6081 decl_die->decl_id = decl_id;
6082 }
6083
6084 /* Return how many bits the piece EXPR_LIST PIECE covers.  */
6085
6086 static HOST_WIDE_INT
6087 decl_piece_bitsize (rtx piece)
6088 {
6089 int ret = (int) GET_MODE (piece);
6090 if (ret)
6091 return ret;
6092 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6093 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6094 return INTVAL (XEXP (XEXP (piece, 0), 0));
6095 }
6096
6097 /* Return a pointer to the location note stored in the piece EXPR_LIST PIECE.  */
6098
6099 static rtx *
6100 decl_piece_varloc_ptr (rtx piece)
6101 {
6102 if ((int) GET_MODE (piece))
6103 return &XEXP (piece, 0);
6104 else
6105 return &XEXP (XEXP (piece, 0), 1);
6106 }
6107
6108 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6109    NEXT is the chain of following piece nodes.  */
6110
6111 static rtx_expr_list *
6112 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6113 {
6114 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6115 return alloc_EXPR_LIST (bitsize, loc_note, next);
6116 else
6117 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6118 GEN_INT (bitsize),
6119 loc_note), next);
6120 }
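
/* The piece encoding above is a small space optimization: when BITSIZE
   fits in the machine_mode field (0 < BITSIZE <= MAX_MACHINE_MODE), the
   EXPR_LIST's mode field itself holds the bit size and XEXP (piece, 0) is
   the location note; otherwise the mode is left as VOIDmode and
   XEXP (piece, 0) is a CONCAT of (CONST_INT BITSIZE, note).  This is
   exactly what decl_piece_bitsize and decl_piece_varloc_ptr decode.  */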
6121
6122 /* Return rtx that should be stored into loc field for
6123 LOC_NOTE and BITPOS/BITSIZE. */
6124
6125 static rtx
6126 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6127 HOST_WIDE_INT bitsize)
6128 {
6129 if (bitsize != -1)
6130 {
6131 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6132 if (bitpos != 0)
6133 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6134 }
6135 return loc_note;
6136 }
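
/* For example, with BITPOS == 8 and BITSIZE == 16 the function above
   returns a two-node chain: an 8-bit padding piece with a NULL location
   followed by a 16-bit piece holding LOC_NOTE; with BITSIZE == -1 the
   note is returned unwrapped.  */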
6137
6138 /* This function either modifies location piece list *DEST in
6139    place (if SRC and INNER are NULL), or copies location piece list
6140    *SRC to *DEST while modifying it.  The piece at bit position BITPOS
6141    is changed to contain LOC_NOTE; any pieces overlapping it are removed
6142    (or, when copying, not copied) and, if needed, some padding around it
6143    is added.  When modifying in place, DEST should point to the EXPR_LIST
6144    where earlier pieces cover PIECE_BITPOS bits; when copying, SRC points
6145    to the start of the whole list and INNER points to the EXPR_LIST
6146    where earlier pieces cover PIECE_BITPOS bits.  */
6147
6148 static void
6149 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6150 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6151 HOST_WIDE_INT bitsize, rtx loc_note)
6152 {
6153 HOST_WIDE_INT diff;
6154 bool copy = inner != NULL;
6155
6156 if (copy)
6157 {
6158 /* First copy all nodes preceding the current bitpos. */
6159 while (src != inner)
6160 {
6161 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6162 decl_piece_bitsize (*src), NULL_RTX);
6163 dest = &XEXP (*dest, 1);
6164 src = &XEXP (*src, 1);
6165 }
6166 }
6167 /* Add padding if needed. */
6168 if (bitpos != piece_bitpos)
6169 {
6170 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6171 copy ? NULL_RTX : *dest);
6172 dest = &XEXP (*dest, 1);
6173 }
6174 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6175 {
6176 gcc_assert (!copy);
6177       /* A piece with the correct bitpos and bitsize already exists;
6178          just update its location and return.  */
6179 *decl_piece_varloc_ptr (*dest) = loc_note;
6180 return;
6181 }
6182 /* Add the piece that changed. */
6183 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6184 dest = &XEXP (*dest, 1);
6185 /* Skip over pieces that overlap it. */
6186 diff = bitpos - piece_bitpos + bitsize;
6187 if (!copy)
6188 src = dest;
6189 while (diff > 0 && *src)
6190 {
6191 rtx piece = *src;
6192 diff -= decl_piece_bitsize (piece);
6193 if (copy)
6194 src = &XEXP (piece, 1);
6195 else
6196 {
6197 *src = XEXP (piece, 1);
6198 free_EXPR_LIST_node (piece);
6199 }
6200 }
6201 /* Add padding if needed. */
6202 if (diff < 0 && *src)
6203 {
6204 if (!copy)
6205 dest = src;
6206 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6207 dest = &XEXP (*dest, 1);
6208 }
6209 if (!copy)
6210 return;
6211 /* Finally copy all nodes following it. */
6212 while (*src)
6213 {
6214 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6215 decl_piece_bitsize (*src), NULL_RTX);
6216 dest = &XEXP (*dest, 1);
6217 src = &XEXP (*src, 1);
6218 }
6219 }
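
/* A small in-place example of the above: if the existing list covers bits
   [0,32) and [32,64) and we are called with SRC and INNER NULL, DEST
   pointing at the second piece, BITPOS == PIECE_BITPOS == 32 and
   BITSIZE == 32, the early-out branch simply stores LOC_NOTE into that
   existing 32-bit piece and returns without adding or removing nodes.  */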
6220
6221 /* Add a variable location node to the linked list for DECL. */
6222
6223 static struct var_loc_node *
6224 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6225 {
6226 unsigned int decl_id;
6227 var_loc_list *temp;
6228 struct var_loc_node *loc = NULL;
6229 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6230
6231 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6232 {
6233 tree realdecl = DECL_DEBUG_EXPR (decl);
6234 if (handled_component_p (realdecl)
6235 || (TREE_CODE (realdecl) == MEM_REF
6236 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6237 {
6238 bool reverse;
6239 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6240 &bitsize, &reverse);
6241 if (!innerdecl
6242 || !DECL_P (innerdecl)
6243 || DECL_IGNORED_P (innerdecl)
6244 || TREE_STATIC (innerdecl)
6245 || bitsize == 0
6246 || bitpos + bitsize > 256)
6247 return NULL;
6248 decl = innerdecl;
6249 }
6250 }
6251
6252 decl_id = DECL_UID (decl);
6253 var_loc_list **slot
6254 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6255 if (*slot == NULL)
6256 {
6257 temp = ggc_cleared_alloc<var_loc_list> ();
6258 temp->decl_id = decl_id;
6259 *slot = temp;
6260 }
6261 else
6262 temp = *slot;
6263
6264 /* For PARM_DECLs try to keep around the original incoming value,
6265 even if that means we'll emit a zero-range .debug_loc entry. */
6266 if (temp->last
6267 && temp->first == temp->last
6268 && TREE_CODE (decl) == PARM_DECL
6269 && NOTE_P (temp->first->loc)
6270 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6271 && DECL_INCOMING_RTL (decl)
6272 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6273 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6274 == GET_CODE (DECL_INCOMING_RTL (decl))
6275 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6276 && (bitsize != -1
6277 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6278 NOTE_VAR_LOCATION_LOC (loc_note))
6279 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6280 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6281 {
6282 loc = ggc_cleared_alloc<var_loc_node> ();
6283 temp->first->next = loc;
6284 temp->last = loc;
6285 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6286 }
6287 else if (temp->last)
6288 {
6289 struct var_loc_node *last = temp->last, *unused = NULL;
6290 rtx *piece_loc = NULL, last_loc_note;
6291 HOST_WIDE_INT piece_bitpos = 0;
6292 if (last->next)
6293 {
6294 last = last->next;
6295 gcc_assert (last->next == NULL);
6296 }
6297 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6298 {
6299 piece_loc = &last->loc;
6300 do
6301 {
6302 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6303 if (piece_bitpos + cur_bitsize > bitpos)
6304 break;
6305 piece_bitpos += cur_bitsize;
6306 piece_loc = &XEXP (*piece_loc, 1);
6307 }
6308 while (*piece_loc);
6309 }
6310     /* TEMP->LAST here points either to the last-but-one or to the
6311        last element in the chained list; LAST points to the last
6312        element.  */
6313 if (label && strcmp (last->label, label) == 0 && last->view == view)
6314 {
6315         /* For SRA-optimized variables, if there weren't any real
6316            insns since the last note, just modify the last node.  */
6317 if (piece_loc != NULL)
6318 {
6319 adjust_piece_list (piece_loc, NULL, NULL,
6320 bitpos, piece_bitpos, bitsize, loc_note);
6321 return NULL;
6322 }
6323 /* If the last note doesn't cover any instructions, remove it. */
6324 if (temp->last != last)
6325 {
6326 temp->last->next = NULL;
6327 unused = last;
6328 last = temp->last;
6329 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6330 }
6331 else
6332 {
6333 gcc_assert (temp->first == temp->last
6334 || (temp->first->next == temp->last
6335 && TREE_CODE (decl) == PARM_DECL));
6336 memset (temp->last, '\0', sizeof (*temp->last));
6337 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6338 return temp->last;
6339 }
6340 }
6341 if (bitsize == -1 && NOTE_P (last->loc))
6342 last_loc_note = last->loc;
6343 else if (piece_loc != NULL
6344 && *piece_loc != NULL_RTX
6345 && piece_bitpos == bitpos
6346 && decl_piece_bitsize (*piece_loc) == bitsize)
6347 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6348 else
6349 last_loc_note = NULL_RTX;
6350 /* If the current location is the same as the end of the list,
6351 and either both or neither of the locations is uninitialized,
6352 we have nothing to do. */
6353 if (last_loc_note == NULL_RTX
6354 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6355 NOTE_VAR_LOCATION_LOC (loc_note)))
6356 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6357 != NOTE_VAR_LOCATION_STATUS (loc_note))
6358 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6359 == VAR_INIT_STATUS_UNINITIALIZED)
6360 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6361 == VAR_INIT_STATUS_UNINITIALIZED))))
6362 {
6363         /* Add LOC to the end of the list and update LAST.  If the last
6364            element of the list was removed above, reuse its memory
6365            for the new node; otherwise allocate a new one.  */
6366 if (unused)
6367 {
6368 loc = unused;
6369 memset (loc, '\0', sizeof (*loc));
6370 }
6371 else
6372 loc = ggc_cleared_alloc<var_loc_node> ();
6373 if (bitsize == -1 || piece_loc == NULL)
6374 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6375 else
6376 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6377 bitpos, piece_bitpos, bitsize, loc_note);
6378 last->next = loc;
6379 /* Ensure TEMP->LAST will point either to the new last but one
6380 element of the chain, or to the last element in it. */
6381 if (last != temp->last)
6382 temp->last = last;
6383 }
6384 else if (unused)
6385 ggc_free (unused);
6386 }
6387 else
6388 {
6389 loc = ggc_cleared_alloc<var_loc_node> ();
6390 temp->first = loc;
6391 temp->last = loc;
6392 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6393 }
6394 return loc;
6395 }
6396 \f
6397 /* Keep track of the number of spaces used to indent the
6398 output of the debugging routines that print the structure of
6399 the DIE internal representation. */
6400 static int print_indent;
6401
6402 /* Indent the line by the number of spaces given by print_indent.  */
6403
6404 static inline void
6405 print_spaces (FILE *outfile)
6406 {
6407 fprintf (outfile, "%*s", print_indent, "");
6408 }
6409
6410 /* Print a type signature in hex. */
6411
6412 static inline void
6413 print_signature (FILE *outfile, char *sig)
6414 {
6415 int i;
6416
6417 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6418 fprintf (outfile, "%02x", sig[i] & 0xff);
6419 }
6420
6421 static inline void
6422 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6423 {
6424 if (discr_value->pos)
6425 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.sval);
6426 else
6427 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.uval);
6428 }
6429
6430 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6431
6432 /* Print the value associated with the VAL DWARF value node to OUTFILE.
6433    If RECURSE, output location descriptor operations.  */
6434
6435 static void
6436 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6437 {
6438 switch (val->val_class)
6439 {
6440 case dw_val_class_addr:
6441 fprintf (outfile, "address");
6442 break;
6443 case dw_val_class_offset:
6444 fprintf (outfile, "offset");
6445 break;
6446 case dw_val_class_loc:
6447 fprintf (outfile, "location descriptor");
6448 if (val->v.val_loc == NULL)
6449 fprintf (outfile, " -> <null>\n");
6450 else if (recurse)
6451 {
6452 fprintf (outfile, ":\n");
6453 print_indent += 4;
6454 print_loc_descr (val->v.val_loc, outfile);
6455 print_indent -= 4;
6456 }
6457 else
6458 {
6459 if (flag_dump_noaddr || flag_dump_unnumbered)
6460 fprintf (outfile, " #\n");
6461 else
6462 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6463 }
6464 break;
6465 case dw_val_class_loc_list:
6466 fprintf (outfile, "location list -> label:%s",
6467 val->v.val_loc_list->ll_symbol);
6468 break;
6469 case dw_val_class_view_list:
6470 val = view_list_to_loc_list_val_node (val);
6471 fprintf (outfile, "location list with views -> labels:%s and %s",
6472 val->v.val_loc_list->ll_symbol,
6473 val->v.val_loc_list->vl_symbol);
6474 break;
6475 case dw_val_class_range_list:
6476 fprintf (outfile, "range list");
6477 break;
6478 case dw_val_class_const:
6479 case dw_val_class_const_implicit:
6480 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6481 break;
6482 case dw_val_class_unsigned_const:
6483 case dw_val_class_unsigned_const_implicit:
6484 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6485 break;
6486 case dw_val_class_const_double:
6487 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6488 HOST_WIDE_INT_PRINT_UNSIGNED")",
6489 val->v.val_double.high,
6490 val->v.val_double.low);
6491 break;
6492 case dw_val_class_wide_int:
6493 {
6494 int i = val->v.val_wide->get_len ();
6495 fprintf (outfile, "constant (");
6496 gcc_assert (i > 0);
6497 if (val->v.val_wide->elt (i - 1) == 0)
6498 fprintf (outfile, "0x");
6499 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6500 val->v.val_wide->elt (--i));
6501 while (--i >= 0)
6502 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6503 val->v.val_wide->elt (i));
6504 fprintf (outfile, ")");
6505 break;
6506 }
6507 case dw_val_class_vec:
6508 fprintf (outfile, "floating-point or vector constant");
6509 break;
6510 case dw_val_class_flag:
6511 fprintf (outfile, "%u", val->v.val_flag);
6512 break;
6513 case dw_val_class_die_ref:
6514 if (val->v.val_die_ref.die != NULL)
6515 {
6516 dw_die_ref die = val->v.val_die_ref.die;
6517
6518 if (die->comdat_type_p)
6519 {
6520 fprintf (outfile, "die -> signature: ");
6521 print_signature (outfile,
6522 die->die_id.die_type_node->signature);
6523 }
6524 else if (die->die_id.die_symbol)
6525 {
6526 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6527 if (die->with_offset)
6528 fprintf (outfile, " + %ld", die->die_offset);
6529 }
6530 else
6531 fprintf (outfile, "die -> %ld", die->die_offset);
6532 if (flag_dump_noaddr || flag_dump_unnumbered)
6533 fprintf (outfile, " #");
6534 else
6535 fprintf (outfile, " (%p)", (void *) die);
6536 }
6537 else
6538 fprintf (outfile, "die -> <null>");
6539 break;
6540 case dw_val_class_vms_delta:
6541 fprintf (outfile, "delta: @slotcount(%s-%s)",
6542 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6543 break;
6544 case dw_val_class_symview:
6545 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6546 break;
6547 case dw_val_class_lbl_id:
6548 case dw_val_class_lineptr:
6549 case dw_val_class_macptr:
6550 case dw_val_class_loclistsptr:
6551 case dw_val_class_high_pc:
6552 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6553 break;
6554 case dw_val_class_str:
6555 if (val->v.val_str->str != NULL)
6556 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6557 else
6558 fprintf (outfile, "<null>");
6559 break;
6560 case dw_val_class_file:
6561 case dw_val_class_file_implicit:
6562 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6563 val->v.val_file->emitted_number);
6564 break;
6565 case dw_val_class_data8:
6566 {
6567 int i;
6568
6569 for (i = 0; i < 8; i++)
6570 fprintf (outfile, "%02x", val->v.val_data8[i]);
6571 break;
6572 }
6573 case dw_val_class_discr_value:
6574 print_discr_value (outfile, &val->v.val_discr_value);
6575 break;
6576 case dw_val_class_discr_list:
6577 for (dw_discr_list_ref node = val->v.val_discr_list;
6578 node != NULL;
6579 node = node->dw_discr_next)
6580 {
6581       if (node->dw_discr_range)
6582         {
6583           print_discr_value (outfile, &node->dw_discr_lower_bound);
6584           fprintf (outfile, " .. ");
6585           print_discr_value (outfile, &node->dw_discr_upper_bound);
6586         }
6587 else
6588 print_discr_value (outfile, &node->dw_discr_lower_bound);
6589
6590 if (node->dw_discr_next != NULL)
6591 fprintf (outfile, " | ");
6592 }
6593 default:
6594 break;
6595 }
6596 }
6597
6598 /* Likewise, for a DIE attribute. */
6599
6600 static void
6601 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6602 {
6603 print_dw_val (&a->dw_attr_val, recurse, outfile);
6604 }
6605
6606
6607 /* Print the list of operands in the LOC location description to OUTFILE. This
6608 routine is a debugging aid only. */
6609
6610 static void
6611 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6612 {
6613 dw_loc_descr_ref l = loc;
6614
6615 if (loc == NULL)
6616 {
6617 print_spaces (outfile);
6618 fprintf (outfile, "<null>\n");
6619 return;
6620 }
6621
6622 for (l = loc; l != NULL; l = l->dw_loc_next)
6623 {
6624 print_spaces (outfile);
6625 if (flag_dump_noaddr || flag_dump_unnumbered)
6626 fprintf (outfile, "#");
6627 else
6628 fprintf (outfile, "(%p)", (void *) l);
6629 fprintf (outfile, " %s",
6630 dwarf_stack_op_name (l->dw_loc_opc));
6631 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6632 {
6633 fprintf (outfile, " ");
6634 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6635 }
6636 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6637 {
6638 fprintf (outfile, ", ");
6639 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6640 }
6641 fprintf (outfile, "\n");
6642 }
6643 }
6644
6645 /* Print the information associated with a given DIE, and its children.
6646 This routine is a debugging aid only. */
6647
6648 static void
6649 print_die (dw_die_ref die, FILE *outfile)
6650 {
6651 dw_attr_node *a;
6652 dw_die_ref c;
6653 unsigned ix;
6654
6655 print_spaces (outfile);
6656 fprintf (outfile, "DIE %4ld: %s ",
6657 die->die_offset, dwarf_tag_name (die->die_tag));
6658 if (flag_dump_noaddr || flag_dump_unnumbered)
6659 fprintf (outfile, "#\n");
6660 else
6661 fprintf (outfile, "(%p)\n", (void*) die);
6662 print_spaces (outfile);
6663 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6664 fprintf (outfile, " offset: %ld", die->die_offset);
6665 fprintf (outfile, " mark: %d\n", die->die_mark);
6666
6667 if (die->comdat_type_p)
6668 {
6669 print_spaces (outfile);
6670 fprintf (outfile, " signature: ");
6671 print_signature (outfile, die->die_id.die_type_node->signature);
6672 fprintf (outfile, "\n");
6673 }
6674
6675 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6676 {
6677 print_spaces (outfile);
6678 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6679
6680 print_attribute (a, true, outfile);
6681 fprintf (outfile, "\n");
6682 }
6683
6684 if (die->die_child != NULL)
6685 {
6686 print_indent += 4;
6687 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6688 print_indent -= 4;
6689 }
6690 if (print_indent == 0)
6691 fprintf (outfile, "\n");
6692 }
6693
6694 /* Print the list of operations in the LOC location description. */
6695
6696 DEBUG_FUNCTION void
6697 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6698 {
6699 print_loc_descr (loc, stderr);
6700 }
6701
6702 /* Print the information collected for a given DIE. */
6703
6704 DEBUG_FUNCTION void
6705 debug_dwarf_die (dw_die_ref die)
6706 {
6707 print_die (die, stderr);
6708 }
6709
6710 DEBUG_FUNCTION void
6711 debug (die_struct &ref)
6712 {
6713 print_die (&ref, stderr);
6714 }
6715
6716 DEBUG_FUNCTION void
6717 debug (die_struct *ptr)
6718 {
6719 if (ptr)
6720 debug (*ptr);
6721 else
6722 fprintf (stderr, "<nil>\n");
6723 }
6724
6725
6726 /* Print all DWARF information collected for the compilation unit.
6727 This routine is a debugging aid only. */
6728
6729 DEBUG_FUNCTION void
6730 debug_dwarf (void)
6731 {
6732 print_indent = 0;
6733 print_die (comp_unit_die (), stderr);
6734 }
6735
6736 /* Verify the DIE tree structure. */
6737
6738 DEBUG_FUNCTION void
6739 verify_die (dw_die_ref die)
6740 {
6741 gcc_assert (!die->die_mark);
6742 if (die->die_parent == NULL
6743 && die->die_sib == NULL)
6744 return;
6745 /* Verify the die_sib list is cyclic. */
6746 dw_die_ref x = die;
6747 do
6748 {
6749 x->die_mark = 1;
6750 x = x->die_sib;
6751 }
6752 while (x && !x->die_mark);
6753 gcc_assert (x == die);
6754 x = die;
6755 do
6756 {
6757 /* Verify all dies have the same parent. */
6758 gcc_assert (x->die_parent == die->die_parent);
6759 if (x->die_child)
6760 {
6761 /* Verify the child has the proper parent and recurse. */
6762 gcc_assert (x->die_child->die_parent == x);
6763 verify_die (x->die_child);
6764 }
6765 x->die_mark = 0;
6766 x = x->die_sib;
6767 }
6768 while (x && x->die_mark);
6769 }
6770
6771 /* Sanity checks on DIEs. */
6772
6773 static void
6774 check_die (dw_die_ref die)
6775 {
6776 unsigned ix;
6777 dw_attr_node *a;
6778 bool inline_found = false;
6779 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6780 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6781 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6782 {
6783 switch (a->dw_attr)
6784 {
6785 case DW_AT_inline:
6786 if (a->dw_attr_val.v.val_unsigned)
6787 inline_found = true;
6788 break;
6789 case DW_AT_location:
6790 ++n_location;
6791 break;
6792 case DW_AT_low_pc:
6793 ++n_low_pc;
6794 break;
6795 case DW_AT_high_pc:
6796 ++n_high_pc;
6797 break;
6798 case DW_AT_artificial:
6799 ++n_artificial;
6800 break;
6801 case DW_AT_decl_column:
6802 ++n_decl_column;
6803 break;
6804 case DW_AT_decl_line:
6805 ++n_decl_line;
6806 break;
6807 case DW_AT_decl_file:
6808 ++n_decl_file;
6809 break;
6810 default:
6811 break;
6812 }
6813 }
6814 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6815 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6816 {
6817 fprintf (stderr, "Duplicate attributes in DIE:\n");
6818 debug_dwarf_die (die);
6819 gcc_unreachable ();
6820 }
6821 if (inline_found)
6822 {
6823 /* A debugging information entry that is a member of an abstract
6824 instance tree [that has DW_AT_inline] should not contain any
6825 attributes which describe aspects of the subroutine which vary
6826 between distinct inlined expansions or distinct out-of-line
6827 expansions. */
6828 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6829 gcc_assert (a->dw_attr != DW_AT_low_pc
6830 && a->dw_attr != DW_AT_high_pc
6831 && a->dw_attr != DW_AT_location
6832 && a->dw_attr != DW_AT_frame_base
6833 && a->dw_attr != DW_AT_call_all_calls
6834 && a->dw_attr != DW_AT_GNU_all_call_sites);
6835 }
6836 }
6837 \f
6838 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6839 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6840 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6841
6842 /* Calculate the checksum of a location expression. */
6843
6844 static inline void
6845 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6846 {
6847 int tem;
6848 inchash::hash hstate;
6849 hashval_t hash;
6850
6851 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6852 CHECKSUM (tem);
6853 hash_loc_operands (loc, hstate);
6854 hash = hstate.end();
6855 CHECKSUM (hash);
6856 }
6857
6858 /* Calculate the checksum of an attribute. */
6859
6860 static void
6861 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6862 {
6863 dw_loc_descr_ref loc;
6864 rtx r;
6865
6866 CHECKSUM (at->dw_attr);
6867
6868 /* We don't care that this was compiled with a different compiler
6869 snapshot; if the output is the same, that's what matters. */
6870 if (at->dw_attr == DW_AT_producer)
6871 return;
6872
6873 switch (AT_class (at))
6874 {
6875 case dw_val_class_const:
6876 case dw_val_class_const_implicit:
6877 CHECKSUM (at->dw_attr_val.v.val_int);
6878 break;
6879 case dw_val_class_unsigned_const:
6880 case dw_val_class_unsigned_const_implicit:
6881 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6882 break;
6883 case dw_val_class_const_double:
6884 CHECKSUM (at->dw_attr_val.v.val_double);
6885 break;
6886 case dw_val_class_wide_int:
6887 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6888 get_full_len (*at->dw_attr_val.v.val_wide)
6889 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6890 break;
6891 case dw_val_class_vec:
6892 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6893 (at->dw_attr_val.v.val_vec.length
6894 * at->dw_attr_val.v.val_vec.elt_size));
6895 break;
6896 case dw_val_class_flag:
6897 CHECKSUM (at->dw_attr_val.v.val_flag);
6898 break;
6899 case dw_val_class_str:
6900 CHECKSUM_STRING (AT_string (at));
6901 break;
6902
6903 case dw_val_class_addr:
6904 r = AT_addr (at);
6905 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6906 CHECKSUM_STRING (XSTR (r, 0));
6907 break;
6908
6909 case dw_val_class_offset:
6910 CHECKSUM (at->dw_attr_val.v.val_offset);
6911 break;
6912
6913 case dw_val_class_loc:
6914 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6915 loc_checksum (loc, ctx);
6916 break;
6917
6918 case dw_val_class_die_ref:
6919 die_checksum (AT_ref (at), ctx, mark);
6920 break;
6921
6922 case dw_val_class_fde_ref:
6923 case dw_val_class_vms_delta:
6924 case dw_val_class_symview:
6925 case dw_val_class_lbl_id:
6926 case dw_val_class_lineptr:
6927 case dw_val_class_macptr:
6928 case dw_val_class_loclistsptr:
6929 case dw_val_class_high_pc:
6930 break;
6931
6932 case dw_val_class_file:
6933 case dw_val_class_file_implicit:
6934 CHECKSUM_STRING (AT_file (at)->filename);
6935 break;
6936
6937 case dw_val_class_data8:
6938 CHECKSUM (at->dw_attr_val.v.val_data8);
6939 break;
6940
6941 default:
6942 break;
6943 }
6944 }
6945
6946 /* Calculate the checksum of a DIE. */
6947
6948 static void
6949 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6950 {
6951 dw_die_ref c;
6952 dw_attr_node *a;
6953 unsigned ix;
6954
6955 /* To avoid infinite recursion. */
6956 if (die->die_mark)
6957 {
6958 CHECKSUM (die->die_mark);
6959 return;
6960 }
6961 die->die_mark = ++(*mark);
6962
6963 CHECKSUM (die->die_tag);
6964
6965 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6966 attr_checksum (a, ctx, mark);
6967
6968 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6969 }
6970
6971 #undef CHECKSUM
6972 #undef CHECKSUM_BLOCK
6973 #undef CHECKSUM_STRING
6974
6975 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6976 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6977 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6978 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6979 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6980 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6981 #define CHECKSUM_ATTR(FOO) \
6982 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6983
6984 /* Calculate the checksum of a number in signed LEB128 format. */
6985
6986 static void
6987 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6988 {
6989 unsigned char byte;
6990 bool more;
6991
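/* Standard SLEB128: emit the value seven bits at a time, least significant
   bits first.  Encoding stops once the remaining value is all sign bits and
   the sign bit of the byte just emitted matches it; every byte except the
   last has its high bit set.  */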
6992 while (1)
6993 {
6994 byte = (value & 0x7f);
6995 value >>= 7;
6996 more = !((value == 0 && (byte & 0x40) == 0)
6997 || (value == -1 && (byte & 0x40) != 0));
6998 if (more)
6999 byte |= 0x80;
7000 CHECKSUM (byte);
7001 if (!more)
7002 break;
7003 }
7004 }
7005
7006 /* Calculate the checksum of a number in unsigned LEB128 format. */
7007
7008 static void
7009 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7010 {
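/* Standard ULEB128: emit seven bits at a time, low-order bits first, with
   the high bit set on every byte except the last.  */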
7011 while (1)
7012 {
7013 unsigned char byte = (value & 0x7f);
7014 value >>= 7;
7015 if (value != 0)
7016 /* More bytes to follow. */
7017 byte |= 0x80;
7018 CHECKSUM (byte);
7019 if (value == 0)
7020 break;
7021 }
7022 }
7023
7024 /* Checksum the context of the DIE. This adds the names of any
7025 surrounding namespaces or structures to the checksum. */
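/* The single-character codes mixed into the hash here and in the functions
   below ('C', 'D', 'A', 'N', 'E', 'R', 'S', 'T') follow the type signature
   computation algorithm of the DWARF 4 standard (Section 7.27), so that the
   resulting signatures are reproducible across producers.  */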
7026
7027 static void
7028 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7029 {
7030 const char *name;
7031 dw_die_ref spec;
7032 int tag = die->die_tag;
7033
7034 if (tag != DW_TAG_namespace
7035 && tag != DW_TAG_structure_type
7036 && tag != DW_TAG_class_type)
7037 return;
7038
7039 name = get_AT_string (die, DW_AT_name);
7040
7041 spec = get_AT_ref (die, DW_AT_specification);
7042 if (spec != NULL)
7043 die = spec;
7044
7045 if (die->die_parent != NULL)
7046 checksum_die_context (die->die_parent, ctx);
7047
7048 CHECKSUM_ULEB128 ('C');
7049 CHECKSUM_ULEB128 (tag);
7050 if (name != NULL)
7051 CHECKSUM_STRING (name);
7052 }
7053
7054 /* Calculate the checksum of a location expression. */
7055
7056 static inline void
7057 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7058 {
7059 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7060 were emitted as a DW_FORM_sdata instead of a location expression. */
7061 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7062 {
7063 CHECKSUM_ULEB128 (DW_FORM_sdata);
7064 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7065 return;
7066 }
7067
7068 /* Otherwise, just checksum the raw location expression. */
7069 while (loc != NULL)
7070 {
7071 inchash::hash hstate;
7072 hashval_t hash;
7073
7074 CHECKSUM_ULEB128 (loc->dtprel);
7075 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7076 hash_loc_operands (loc, hstate);
7077 hash = hstate.end ();
7078 CHECKSUM (hash);
7079 loc = loc->dw_loc_next;
7080 }
7081 }
7082
7083 /* Calculate the checksum of an attribute. */
7084
7085 static void
7086 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7087 struct md5_ctx *ctx, int *mark)
7088 {
7089 dw_loc_descr_ref loc;
7090 rtx r;
7091
7092 if (AT_class (at) == dw_val_class_die_ref)
7093 {
7094 dw_die_ref target_die = AT_ref (at);
7095
7096 /* For pointer and reference types, we checksum only the (qualified)
7097 name of the target type (if there is a name). For friend entries,
7098 we checksum only the (qualified) name of the target type or function.
7099 This allows the checksum to remain the same whether the target type
7100 is complete or not. */
7101 if ((at->dw_attr == DW_AT_type
7102 && (tag == DW_TAG_pointer_type
7103 || tag == DW_TAG_reference_type
7104 || tag == DW_TAG_rvalue_reference_type
7105 || tag == DW_TAG_ptr_to_member_type))
7106 || (at->dw_attr == DW_AT_friend
7107 && tag == DW_TAG_friend))
7108 {
7109 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7110
7111 if (name_attr != NULL)
7112 {
7113 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7114
7115 if (decl == NULL)
7116 decl = target_die;
7117 CHECKSUM_ULEB128 ('N');
7118 CHECKSUM_ULEB128 (at->dw_attr);
7119 if (decl->die_parent != NULL)
7120 checksum_die_context (decl->die_parent, ctx);
7121 CHECKSUM_ULEB128 ('E');
7122 CHECKSUM_STRING (AT_string (name_attr));
7123 return;
7124 }
7125 }
7126
7127 /* For all other references to another DIE, we check to see if the
7128 target DIE has already been visited. If it has, we emit a
7129 backward reference; if not, we descend recursively. */
7130 if (target_die->die_mark > 0)
7131 {
7132 CHECKSUM_ULEB128 ('R');
7133 CHECKSUM_ULEB128 (at->dw_attr);
7134 CHECKSUM_ULEB128 (target_die->die_mark);
7135 }
7136 else
7137 {
7138 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7139
7140 if (decl == NULL)
7141 decl = target_die;
7142 target_die->die_mark = ++(*mark);
7143 CHECKSUM_ULEB128 ('T');
7144 CHECKSUM_ULEB128 (at->dw_attr);
7145 if (decl->die_parent != NULL)
7146 checksum_die_context (decl->die_parent, ctx);
7147 die_checksum_ordered (target_die, ctx, mark);
7148 }
7149 return;
7150 }
7151
7152 CHECKSUM_ULEB128 ('A');
7153 CHECKSUM_ULEB128 (at->dw_attr);
7154
7155 switch (AT_class (at))
7156 {
7157 case dw_val_class_const:
7158 case dw_val_class_const_implicit:
7159 CHECKSUM_ULEB128 (DW_FORM_sdata);
7160 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7161 break;
7162
7163 case dw_val_class_unsigned_const:
7164 case dw_val_class_unsigned_const_implicit:
7165 CHECKSUM_ULEB128 (DW_FORM_sdata);
7166 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7167 break;
7168
7169 case dw_val_class_const_double:
7170 CHECKSUM_ULEB128 (DW_FORM_block);
7171 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7172 CHECKSUM (at->dw_attr_val.v.val_double);
7173 break;
7174
7175 case dw_val_class_wide_int:
7176 CHECKSUM_ULEB128 (DW_FORM_block);
7177 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7178 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7179 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7180 get_full_len (*at->dw_attr_val.v.val_wide)
7181 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7182 break;
7183
7184 case dw_val_class_vec:
7185 CHECKSUM_ULEB128 (DW_FORM_block);
7186 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7187 * at->dw_attr_val.v.val_vec.elt_size);
7188 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7189 (at->dw_attr_val.v.val_vec.length
7190 * at->dw_attr_val.v.val_vec.elt_size));
7191 break;
7192
7193 case dw_val_class_flag:
7194 CHECKSUM_ULEB128 (DW_FORM_flag);
7195 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7196 break;
7197
7198 case dw_val_class_str:
7199 CHECKSUM_ULEB128 (DW_FORM_string);
7200 CHECKSUM_STRING (AT_string (at));
7201 break;
7202
7203 case dw_val_class_addr:
7204 r = AT_addr (at);
7205 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7206 CHECKSUM_ULEB128 (DW_FORM_string);
7207 CHECKSUM_STRING (XSTR (r, 0));
7208 break;
7209
7210 case dw_val_class_offset:
7211 CHECKSUM_ULEB128 (DW_FORM_sdata);
7212 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7213 break;
7214
7215 case dw_val_class_loc:
7216 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7217 loc_checksum_ordered (loc, ctx);
7218 break;
7219
7220 case dw_val_class_fde_ref:
7221 case dw_val_class_symview:
7222 case dw_val_class_lbl_id:
7223 case dw_val_class_lineptr:
7224 case dw_val_class_macptr:
7225 case dw_val_class_loclistsptr:
7226 case dw_val_class_high_pc:
7227 break;
7228
7229 case dw_val_class_file:
7230 case dw_val_class_file_implicit:
7231 CHECKSUM_ULEB128 (DW_FORM_string);
7232 CHECKSUM_STRING (AT_file (at)->filename);
7233 break;
7234
7235 case dw_val_class_data8:
7236 CHECKSUM (at->dw_attr_val.v.val_data8);
7237 break;
7238
7239 default:
7240 break;
7241 }
7242 }
7243
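/* The subset of attributes that participate in a type signature, collected
   here so that die_checksum_ordered can hash them in a fixed, canonical order
   rather than in whatever order they happen to appear on the DIE.  */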
7244 struct checksum_attributes
7245 {
7246 dw_attr_node *at_name;
7247 dw_attr_node *at_type;
7248 dw_attr_node *at_friend;
7249 dw_attr_node *at_accessibility;
7250 dw_attr_node *at_address_class;
7251 dw_attr_node *at_alignment;
7252 dw_attr_node *at_allocated;
7253 dw_attr_node *at_artificial;
7254 dw_attr_node *at_associated;
7255 dw_attr_node *at_binary_scale;
7256 dw_attr_node *at_bit_offset;
7257 dw_attr_node *at_bit_size;
7258 dw_attr_node *at_bit_stride;
7259 dw_attr_node *at_byte_size;
7260 dw_attr_node *at_byte_stride;
7261 dw_attr_node *at_const_value;
7262 dw_attr_node *at_containing_type;
7263 dw_attr_node *at_count;
7264 dw_attr_node *at_data_location;
7265 dw_attr_node *at_data_member_location;
7266 dw_attr_node *at_decimal_scale;
7267 dw_attr_node *at_decimal_sign;
7268 dw_attr_node *at_default_value;
7269 dw_attr_node *at_digit_count;
7270 dw_attr_node *at_discr;
7271 dw_attr_node *at_discr_list;
7272 dw_attr_node *at_discr_value;
7273 dw_attr_node *at_encoding;
7274 dw_attr_node *at_endianity;
7275 dw_attr_node *at_explicit;
7276 dw_attr_node *at_is_optional;
7277 dw_attr_node *at_location;
7278 dw_attr_node *at_lower_bound;
7279 dw_attr_node *at_mutable;
7280 dw_attr_node *at_ordering;
7281 dw_attr_node *at_picture_string;
7282 dw_attr_node *at_prototyped;
7283 dw_attr_node *at_small;
7284 dw_attr_node *at_segment;
7285 dw_attr_node *at_string_length;
7286 dw_attr_node *at_string_length_bit_size;
7287 dw_attr_node *at_string_length_byte_size;
7288 dw_attr_node *at_threads_scaled;
7289 dw_attr_node *at_upper_bound;
7290 dw_attr_node *at_use_location;
7291 dw_attr_node *at_use_UTF8;
7292 dw_attr_node *at_variable_parameter;
7293 dw_attr_node *at_virtuality;
7294 dw_attr_node *at_visibility;
7295 dw_attr_node *at_vtable_elem_location;
7296 };
7297
7298 /* Collect the attributes that we will want to use for the checksum. */
7299
7300 static void
7301 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7302 {
7303 dw_attr_node *a;
7304 unsigned ix;
7305
7306 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7307 {
7308 switch (a->dw_attr)
7309 {
7310 case DW_AT_name:
7311 attrs->at_name = a;
7312 break;
7313 case DW_AT_type:
7314 attrs->at_type = a;
7315 break;
7316 case DW_AT_friend:
7317 attrs->at_friend = a;
7318 break;
7319 case DW_AT_accessibility:
7320 attrs->at_accessibility = a;
7321 break;
7322 case DW_AT_address_class:
7323 attrs->at_address_class = a;
7324 break;
7325 case DW_AT_alignment:
7326 attrs->at_alignment = a;
7327 break;
7328 case DW_AT_allocated:
7329 attrs->at_allocated = a;
7330 break;
7331 case DW_AT_artificial:
7332 attrs->at_artificial = a;
7333 break;
7334 case DW_AT_associated:
7335 attrs->at_associated = a;
7336 break;
7337 case DW_AT_binary_scale:
7338 attrs->at_binary_scale = a;
7339 break;
7340 case DW_AT_bit_offset:
7341 attrs->at_bit_offset = a;
7342 break;
7343 case DW_AT_bit_size:
7344 attrs->at_bit_size = a;
7345 break;
7346 case DW_AT_bit_stride:
7347 attrs->at_bit_stride = a;
7348 break;
7349 case DW_AT_byte_size:
7350 attrs->at_byte_size = a;
7351 break;
7352 case DW_AT_byte_stride:
7353 attrs->at_byte_stride = a;
7354 break;
7355 case DW_AT_const_value:
7356 attrs->at_const_value = a;
7357 break;
7358 case DW_AT_containing_type:
7359 attrs->at_containing_type = a;
7360 break;
7361 case DW_AT_count:
7362 attrs->at_count = a;
7363 break;
7364 case DW_AT_data_location:
7365 attrs->at_data_location = a;
7366 break;
7367 case DW_AT_data_member_location:
7368 attrs->at_data_member_location = a;
7369 break;
7370 case DW_AT_decimal_scale:
7371 attrs->at_decimal_scale = a;
7372 break;
7373 case DW_AT_decimal_sign:
7374 attrs->at_decimal_sign = a;
7375 break;
7376 case DW_AT_default_value:
7377 attrs->at_default_value = a;
7378 break;
7379 case DW_AT_digit_count:
7380 attrs->at_digit_count = a;
7381 break;
7382 case DW_AT_discr:
7383 attrs->at_discr = a;
7384 break;
7385 case DW_AT_discr_list:
7386 attrs->at_discr_list = a;
7387 break;
7388 case DW_AT_discr_value:
7389 attrs->at_discr_value = a;
7390 break;
7391 case DW_AT_encoding:
7392 attrs->at_encoding = a;
7393 break;
7394 case DW_AT_endianity:
7395 attrs->at_endianity = a;
7396 break;
7397 case DW_AT_explicit:
7398 attrs->at_explicit = a;
7399 break;
7400 case DW_AT_is_optional:
7401 attrs->at_is_optional = a;
7402 break;
7403 case DW_AT_location:
7404 attrs->at_location = a;
7405 break;
7406 case DW_AT_lower_bound:
7407 attrs->at_lower_bound = a;
7408 break;
7409 case DW_AT_mutable:
7410 attrs->at_mutable = a;
7411 break;
7412 case DW_AT_ordering:
7413 attrs->at_ordering = a;
7414 break;
7415 case DW_AT_picture_string:
7416 attrs->at_picture_string = a;
7417 break;
7418 case DW_AT_prototyped:
7419 attrs->at_prototyped = a;
7420 break;
7421 case DW_AT_small:
7422 attrs->at_small = a;
7423 break;
7424 case DW_AT_segment:
7425 attrs->at_segment = a;
7426 break;
7427 case DW_AT_string_length:
7428 attrs->at_string_length = a;
7429 break;
7430 case DW_AT_string_length_bit_size:
7431 attrs->at_string_length_bit_size = a;
7432 break;
7433 case DW_AT_string_length_byte_size:
7434 attrs->at_string_length_byte_size = a;
7435 break;
7436 case DW_AT_threads_scaled:
7437 attrs->at_threads_scaled = a;
7438 break;
7439 case DW_AT_upper_bound:
7440 attrs->at_upper_bound = a;
7441 break;
7442 case DW_AT_use_location:
7443 attrs->at_use_location = a;
7444 break;
7445 case DW_AT_use_UTF8:
7446 attrs->at_use_UTF8 = a;
7447 break;
7448 case DW_AT_variable_parameter:
7449 attrs->at_variable_parameter = a;
7450 break;
7451 case DW_AT_virtuality:
7452 attrs->at_virtuality = a;
7453 break;
7454 case DW_AT_visibility:
7455 attrs->at_visibility = a;
7456 break;
7457 case DW_AT_vtable_elem_location:
7458 attrs->at_vtable_elem_location = a;
7459 break;
7460 default:
7461 break;
7462 }
7463 }
7464 }
7465
7466 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7467
7468 static void
7469 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7470 {
7471 dw_die_ref c;
7472 dw_die_ref decl;
7473 struct checksum_attributes attrs;
7474
7475 CHECKSUM_ULEB128 ('D');
7476 CHECKSUM_ULEB128 (die->die_tag);
7477
7478 memset (&attrs, 0, sizeof (attrs));
7479
7480 decl = get_AT_ref (die, DW_AT_specification);
7481 if (decl != NULL)
7482 collect_checksum_attributes (&attrs, decl);
7483 collect_checksum_attributes (&attrs, die);
7484
7485 CHECKSUM_ATTR (attrs.at_name);
7486 CHECKSUM_ATTR (attrs.at_accessibility);
7487 CHECKSUM_ATTR (attrs.at_address_class);
7488 CHECKSUM_ATTR (attrs.at_allocated);
7489 CHECKSUM_ATTR (attrs.at_artificial);
7490 CHECKSUM_ATTR (attrs.at_associated);
7491 CHECKSUM_ATTR (attrs.at_binary_scale);
7492 CHECKSUM_ATTR (attrs.at_bit_offset);
7493 CHECKSUM_ATTR (attrs.at_bit_size);
7494 CHECKSUM_ATTR (attrs.at_bit_stride);
7495 CHECKSUM_ATTR (attrs.at_byte_size);
7496 CHECKSUM_ATTR (attrs.at_byte_stride);
7497 CHECKSUM_ATTR (attrs.at_const_value);
7498 CHECKSUM_ATTR (attrs.at_containing_type);
7499 CHECKSUM_ATTR (attrs.at_count);
7500 CHECKSUM_ATTR (attrs.at_data_location);
7501 CHECKSUM_ATTR (attrs.at_data_member_location);
7502 CHECKSUM_ATTR (attrs.at_decimal_scale);
7503 CHECKSUM_ATTR (attrs.at_decimal_sign);
7504 CHECKSUM_ATTR (attrs.at_default_value);
7505 CHECKSUM_ATTR (attrs.at_digit_count);
7506 CHECKSUM_ATTR (attrs.at_discr);
7507 CHECKSUM_ATTR (attrs.at_discr_list);
7508 CHECKSUM_ATTR (attrs.at_discr_value);
7509 CHECKSUM_ATTR (attrs.at_encoding);
7510 CHECKSUM_ATTR (attrs.at_endianity);
7511 CHECKSUM_ATTR (attrs.at_explicit);
7512 CHECKSUM_ATTR (attrs.at_is_optional);
7513 CHECKSUM_ATTR (attrs.at_location);
7514 CHECKSUM_ATTR (attrs.at_lower_bound);
7515 CHECKSUM_ATTR (attrs.at_mutable);
7516 CHECKSUM_ATTR (attrs.at_ordering);
7517 CHECKSUM_ATTR (attrs.at_picture_string);
7518 CHECKSUM_ATTR (attrs.at_prototyped);
7519 CHECKSUM_ATTR (attrs.at_small);
7520 CHECKSUM_ATTR (attrs.at_segment);
7521 CHECKSUM_ATTR (attrs.at_string_length);
7522 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7523 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7524 CHECKSUM_ATTR (attrs.at_threads_scaled);
7525 CHECKSUM_ATTR (attrs.at_upper_bound);
7526 CHECKSUM_ATTR (attrs.at_use_location);
7527 CHECKSUM_ATTR (attrs.at_use_UTF8);
7528 CHECKSUM_ATTR (attrs.at_variable_parameter);
7529 CHECKSUM_ATTR (attrs.at_virtuality);
7530 CHECKSUM_ATTR (attrs.at_visibility);
7531 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7532 CHECKSUM_ATTR (attrs.at_type);
7533 CHECKSUM_ATTR (attrs.at_friend);
7534 CHECKSUM_ATTR (attrs.at_alignment);
7535
7536 /* Checksum the child DIEs. */
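/* Children are kept on a circular list: DIE->die_child points at the last
   child and each child's die_sib points at the next, wrapping back to the
   first; the loop below therefore advances C before visiting it.  */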
7537 c = die->die_child;
7538 if (c) do {
7539 dw_attr_node *name_attr;
7540
7541 c = c->die_sib;
7542 name_attr = get_AT (c, DW_AT_name);
7543 if (is_template_instantiation (c))
7544 {
7545 /* Ignore instantiations of member type and function templates. */
7546 }
7547 else if (name_attr != NULL
7548 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7549 {
7550 /* Use a shallow checksum for named nested types and member
7551 functions. */
7552 CHECKSUM_ULEB128 ('S');
7553 CHECKSUM_ULEB128 (c->die_tag);
7554 CHECKSUM_STRING (AT_string (name_attr));
7555 }
7556 else
7557 {
7558 /* Use a deep checksum for other children. */
7559 /* Mark this DIE so it gets processed when unmarking. */
7560 if (c->die_mark == 0)
7561 c->die_mark = -1;
7562 die_checksum_ordered (c, ctx, mark);
7563 }
7564 } while (c != die->die_child);
7565
7566 CHECKSUM_ULEB128 (0);
7567 }
7568
7569 /* Add a type name and tag to a hash. */
7570 static void
7571 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7572 {
7573 CHECKSUM_ULEB128 (tag);
7574 CHECKSUM_STRING (name);
7575 }
7576
7577 #undef CHECKSUM
7578 #undef CHECKSUM_STRING
7579 #undef CHECKSUM_ATTR
7580 #undef CHECKSUM_SLEB128
7581 #undef CHECKSUM_ULEB128
7582
7583 /* Generate the type signature for DIE. This is computed by generating an
7584 MD5 checksum over the DIE's tag, its relevant attributes, and its
7585 children. Attributes that are references to other DIEs are processed
7586 by recursion, using the MARK field to prevent infinite recursion.
7587 If the DIE is nested inside a namespace or another type, we also
7588 need to include that context in the signature. The lower 64 bits
7589 of the resulting MD5 checksum comprise the signature. */
7590
7591 static void
7592 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7593 {
7594 int mark;
7595 const char *name;
7596 unsigned char checksum[16];
7597 struct md5_ctx ctx;
7598 dw_die_ref decl;
7599 dw_die_ref parent;
7600
7601 name = get_AT_string (die, DW_AT_name);
7602 decl = get_AT_ref (die, DW_AT_specification);
7603 parent = get_die_parent (die);
7604
7605 /* First, compute a signature for just the type name (and its surrounding
7606 context, if any).  This is stored in the type unit DIE for link-time
7607 ODR (one-definition rule) checking. */
7608
7609 if (is_cxx () && name != NULL)
7610 {
7611 md5_init_ctx (&ctx);
7612
7613 /* Checksum the names of surrounding namespaces and structures. */
7614 if (parent != NULL)
7615 checksum_die_context (parent, &ctx);
7616
7617 /* Checksum the current DIE. */
7618 die_odr_checksum (die->die_tag, name, &ctx);
7619 md5_finish_ctx (&ctx, checksum);
7620
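/* The ODR signature is the low-order half of the digest: the last eight of
   the sixteen MD5 checksum bytes.  */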
7621 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7622 }
7623
7624 /* Next, compute the complete type signature. */
7625
7626 md5_init_ctx (&ctx);
7627 mark = 1;
7628 die->die_mark = mark;
7629
7630 /* Checksum the names of surrounding namespaces and structures. */
7631 if (parent != NULL)
7632 checksum_die_context (parent, &ctx);
7633
7634 /* Checksum the DIE and its children. */
7635 die_checksum_ordered (die, &ctx, &mark);
7636 unmark_all_dies (die);
7637 md5_finish_ctx (&ctx, checksum);
7638
7639 /* Store the signature in the type node and link the type DIE and the
7640 type node together. */
7641 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7642 DWARF_TYPE_SIGNATURE_SIZE);
7643 die->comdat_type_p = true;
7644 die->die_id.die_type_node = type_node;
7645 type_node->type_die = die;
7646
7647 /* If the DIE is a specification, link its declaration to the type node
7648 as well. */
7649 if (decl != NULL)
7650 {
7651 decl->comdat_type_p = true;
7652 decl->die_id.die_type_node = type_node;
7653 }
7654 }
7655
7656 /* Do the location expressions look the same? */
7657 static inline int
7658 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7659 {
7660 return loc1->dw_loc_opc == loc2->dw_loc_opc
7661 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7662 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7663 }
7664
7665 /* Do the values look the same? */
7666 static int
7667 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7668 {
7669 dw_loc_descr_ref loc1, loc2;
7670 rtx r1, r2;
7671
7672 if (v1->val_class != v2->val_class)
7673 return 0;
7674
7675 switch (v1->val_class)
7676 {
7677 case dw_val_class_const:
7678 case dw_val_class_const_implicit:
7679 return v1->v.val_int == v2->v.val_int;
7680 case dw_val_class_unsigned_const:
7681 case dw_val_class_unsigned_const_implicit:
7682 return v1->v.val_unsigned == v2->v.val_unsigned;
7683 case dw_val_class_const_double:
7684 return v1->v.val_double.high == v2->v.val_double.high
7685 && v1->v.val_double.low == v2->v.val_double.low;
7686 case dw_val_class_wide_int:
7687 return *v1->v.val_wide == *v2->v.val_wide;
7688 case dw_val_class_vec:
7689 if (v1->v.val_vec.length != v2->v.val_vec.length
7690 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7691 return 0;
7692 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7693 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7694 return 0;
7695 return 1;
7696 case dw_val_class_flag:
7697 return v1->v.val_flag == v2->v.val_flag;
7698 case dw_val_class_str:
7699 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7700
7701 case dw_val_class_addr:
7702 r1 = v1->v.val_addr;
7703 r2 = v2->v.val_addr;
7704 if (GET_CODE (r1) != GET_CODE (r2))
7705 return 0;
7706 return rtx_equal_p (r1, r2);
7707
7708 case dw_val_class_offset:
7709 return v1->v.val_offset == v2->v.val_offset;
7710
7711 case dw_val_class_loc:
7712 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7713 loc1 && loc2;
7714 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7715 if (!same_loc_p (loc1, loc2, mark))
7716 return 0;
7717 return !loc1 && !loc2;
7718
7719 case dw_val_class_die_ref:
7720 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7721
7722 case dw_val_class_symview:
7723 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7724
7725 case dw_val_class_fde_ref:
7726 case dw_val_class_vms_delta:
7727 case dw_val_class_lbl_id:
7728 case dw_val_class_lineptr:
7729 case dw_val_class_macptr:
7730 case dw_val_class_loclistsptr:
7731 case dw_val_class_high_pc:
7732 return 1;
7733
7734 case dw_val_class_file:
7735 case dw_val_class_file_implicit:
7736 return v1->v.val_file == v2->v.val_file;
7737
7738 case dw_val_class_data8:
7739 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7740
7741 default:
7742 return 1;
7743 }
7744 }
7745
7746 /* Do the attributes look the same? */
7747
7748 static int
7749 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7750 {
7751 if (at1->dw_attr != at2->dw_attr)
7752 return 0;
7753
7754 /* We don't care that this was compiled with a different compiler
7755 snapshot; if the output is the same, that's what matters. */
7756 if (at1->dw_attr == DW_AT_producer)
7757 return 1;
7758
7759 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7760 }
7761
7762 /* Do the dies look the same? */
7763
7764 static int
7765 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7766 {
7767 dw_die_ref c1, c2;
7768 dw_attr_node *a1;
7769 unsigned ix;
7770
7771 /* To avoid infinite recursion. */
7772 if (die1->die_mark)
7773 return die1->die_mark == die2->die_mark;
7774 die1->die_mark = die2->die_mark = ++(*mark);
7775
7776 if (die1->die_tag != die2->die_tag)
7777 return 0;
7778
7779 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7780 return 0;
7781
7782 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7783 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7784 return 0;
7785
7786 c1 = die1->die_child;
7787 c2 = die2->die_child;
7788 if (! c1)
7789 {
7790 if (c2)
7791 return 0;
7792 }
7793 else
7794 for (;;)
7795 {
7796 if (!same_die_p (c1, c2, mark))
7797 return 0;
7798 c1 = c1->die_sib;
7799 c2 = c2->die_sib;
7800 if (c1 == die1->die_child)
7801 {
7802 if (c2 == die2->die_child)
7803 break;
7804 else
7805 return 0;
7806 }
7807 }
7808
7809 return 1;
7810 }
7811
7812 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7813 children, and set die_symbol. */
7814
7815 static void
7816 compute_comp_unit_symbol (dw_die_ref unit_die)
7817 {
7818 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7819 const char *base = die_name ? lbasename (die_name) : "anonymous";
7820 char *name = XALLOCAVEC (char, strlen (base) + 64);
7821 char *p;
7822 int i, mark;
7823 unsigned char checksum[16];
7824 struct md5_ctx ctx;
7825
7826 /* Compute the checksum of the DIE, then append part of it as hex digits to
7827 the name of the unit, which is derived from its filename. */
7828
7829 md5_init_ctx (&ctx);
7830 mark = 0;
7831 die_checksum (unit_die, &ctx, &mark);
7832 unmark_all_dies (unit_die);
7833 md5_finish_ctx (&ctx, checksum);
7834
7835 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7836 not start with a letter but with anything valid for filenames and
7837 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7838 character is not a letter. */
7839 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7840 clean_symbol_name (name);
7841
7842 p = name + strlen (name);
7843 for (i = 0; i < 4; i++)
7844 {
7845 sprintf (p, "%.2x", checksum[i]);
7846 p += 2;
7847 }
7848
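/* At this point NAME is the cleaned basename followed by a dot and the
   first four checksum bytes as hex digits, e.g. something like
   "foo.c.12ab34cd" (purely illustrative value).  */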
7849 unit_die->die_id.die_symbol = xstrdup (name);
7850 }
7851
7852 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7853
7854 static int
7855 is_type_die (dw_die_ref die)
7856 {
7857 switch (die->die_tag)
7858 {
7859 case DW_TAG_array_type:
7860 case DW_TAG_class_type:
7861 case DW_TAG_interface_type:
7862 case DW_TAG_enumeration_type:
7863 case DW_TAG_pointer_type:
7864 case DW_TAG_reference_type:
7865 case DW_TAG_rvalue_reference_type:
7866 case DW_TAG_string_type:
7867 case DW_TAG_structure_type:
7868 case DW_TAG_subroutine_type:
7869 case DW_TAG_union_type:
7870 case DW_TAG_ptr_to_member_type:
7871 case DW_TAG_set_type:
7872 case DW_TAG_subrange_type:
7873 case DW_TAG_base_type:
7874 case DW_TAG_const_type:
7875 case DW_TAG_file_type:
7876 case DW_TAG_packed_type:
7877 case DW_TAG_volatile_type:
7878 case DW_TAG_typedef:
7879 return 1;
7880 default:
7881 return 0;
7882 }
7883 }
7884
7885 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7886 Basically, we want to choose the bits that are likely to be shared between
7887 compilations (types) and leave out the bits that are specific to individual
7888 compilations (functions). */
7889
7890 static int
7891 is_comdat_die (dw_die_ref c)
7892 {
7893 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7894 we do for stabs. The advantage is a greater likelihood of sharing between
7895 objects that don't include headers in the same order (and therefore would
7896 put the base types in a different comdat). jason 8/28/00 */
7897
7898 if (c->die_tag == DW_TAG_base_type)
7899 return 0;
7900
7901 if (c->die_tag == DW_TAG_pointer_type
7902 || c->die_tag == DW_TAG_reference_type
7903 || c->die_tag == DW_TAG_rvalue_reference_type
7904 || c->die_tag == DW_TAG_const_type
7905 || c->die_tag == DW_TAG_volatile_type)
7906 {
7907 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7908
7909 return t ? is_comdat_die (t) : 0;
7910 }
7911
7912 return is_type_die (c);
7913 }
7914
7915 /* Returns true iff C is a compile-unit DIE. */
7916
7917 static inline bool
7918 is_cu_die (dw_die_ref c)
7919 {
7920 return c && (c->die_tag == DW_TAG_compile_unit
7921 || c->die_tag == DW_TAG_skeleton_unit);
7922 }
7923
7924 /* Returns true iff C is a unit DIE of some sort. */
7925
7926 static inline bool
7927 is_unit_die (dw_die_ref c)
7928 {
7929 return c && (c->die_tag == DW_TAG_compile_unit
7930 || c->die_tag == DW_TAG_partial_unit
7931 || c->die_tag == DW_TAG_type_unit
7932 || c->die_tag == DW_TAG_skeleton_unit);
7933 }
7934
7935 /* Returns true iff C is a namespace DIE. */
7936
7937 static inline bool
7938 is_namespace_die (dw_die_ref c)
7939 {
7940 return c && c->die_tag == DW_TAG_namespace;
7941 }
7942
7943 /* Returns true iff C is a class or structure DIE. */
7944
7945 static inline bool
7946 is_class_die (dw_die_ref c)
7947 {
7948 return c && (c->die_tag == DW_TAG_class_type
7949 || c->die_tag == DW_TAG_structure_type);
7950 }
7951
7952 /* Return non-zero if this DIE is a template parameter. */
7953
7954 static inline bool
7955 is_template_parameter (dw_die_ref die)
7956 {
7957 switch (die->die_tag)
7958 {
7959 case DW_TAG_template_type_param:
7960 case DW_TAG_template_value_param:
7961 case DW_TAG_GNU_template_template_param:
7962 case DW_TAG_GNU_template_parameter_pack:
7963 return true;
7964 default:
7965 return false;
7966 }
7967 }
7968
7969 /* Return non-zero if this DIE represents a template instantiation. */
7970
7971 static inline bool
7972 is_template_instantiation (dw_die_ref die)
7973 {
7974 dw_die_ref c;
7975
7976 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7977 return false;
7978 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7979 return false;
7980 }
7981
7982 static char *
7983 gen_internal_sym (const char *prefix)
7984 {
7985 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7986
7987 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7988 return xstrdup (buf);
7989 }
7990
7991 /* Return non-zero if this DIE is a declaration. */
7992
7993 static int
7994 is_declaration_die (dw_die_ref die)
7995 {
7996 dw_attr_node *a;
7997 unsigned ix;
7998
7999 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8000 if (a->dw_attr == DW_AT_declaration)
8001 return 1;
8002
8003 return 0;
8004 }
8005
8006 /* Return non-zero if this DIE is nested inside a subprogram. */
8007
8008 static int
8009 is_nested_in_subprogram (dw_die_ref die)
8010 {
8011 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
8012
8013 if (decl == NULL)
8014 decl = die;
8015 return local_scope_p (decl);
8016 }
8017
8018 /* Return non-zero if this DIE contains a defining declaration of a
8019 subprogram. */
8020
8021 static int
8022 contains_subprogram_definition (dw_die_ref die)
8023 {
8024 dw_die_ref c;
8025
8026 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8027 return 1;
8028 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8029 return 0;
8030 }
8031
8032 /* Return non-zero if this is a type DIE that should be moved to a
8033 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8034 unit type. */
8035
8036 static int
8037 should_move_die_to_comdat (dw_die_ref die)
8038 {
8039 switch (die->die_tag)
8040 {
8041 case DW_TAG_class_type:
8042 case DW_TAG_structure_type:
8043 case DW_TAG_enumeration_type:
8044 case DW_TAG_union_type:
8045 /* Don't move declarations, inlined instances, types nested in a
8046 subprogram, or types that contain subprogram definitions. */
8047 if (is_declaration_die (die)
8048 || get_AT (die, DW_AT_abstract_origin)
8049 || is_nested_in_subprogram (die)
8050 || contains_subprogram_definition (die))
8051 return 0;
8052 return 1;
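/* Everything else, including the type tags listed below, stays in the
   compile unit.  */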
8053 case DW_TAG_array_type:
8054 case DW_TAG_interface_type:
8055 case DW_TAG_pointer_type:
8056 case DW_TAG_reference_type:
8057 case DW_TAG_rvalue_reference_type:
8058 case DW_TAG_string_type:
8059 case DW_TAG_subroutine_type:
8060 case DW_TAG_ptr_to_member_type:
8061 case DW_TAG_set_type:
8062 case DW_TAG_subrange_type:
8063 case DW_TAG_base_type:
8064 case DW_TAG_const_type:
8065 case DW_TAG_file_type:
8066 case DW_TAG_packed_type:
8067 case DW_TAG_volatile_type:
8068 case DW_TAG_typedef:
8069 default:
8070 return 0;
8071 }
8072 }
8073
8074 /* Make a clone of DIE. */
8075
8076 static dw_die_ref
8077 clone_die (dw_die_ref die)
8078 {
8079 dw_die_ref clone = new_die_raw (die->die_tag);
8080 dw_attr_node *a;
8081 unsigned ix;
8082
8083 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8084 add_dwarf_attr (clone, a);
8085
8086 return clone;
8087 }
8088
8089 /* Make a clone of the tree rooted at DIE. */
8090
8091 static dw_die_ref
8092 clone_tree (dw_die_ref die)
8093 {
8094 dw_die_ref c;
8095 dw_die_ref clone = clone_die (die);
8096
8097 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8098
8099 return clone;
8100 }
8101
8102 /* Make a clone of DIE as a declaration. */
8103
8104 static dw_die_ref
8105 clone_as_declaration (dw_die_ref die)
8106 {
8107 dw_die_ref clone;
8108 dw_die_ref decl;
8109 dw_attr_node *a;
8110 unsigned ix;
8111
8112 /* If the DIE is already a declaration, just clone it. */
8113 if (is_declaration_die (die))
8114 return clone_die (die);
8115
8116 /* If the DIE is a specification, just clone its declaration DIE. */
8117 decl = get_AT_ref (die, DW_AT_specification);
8118 if (decl != NULL)
8119 {
8120 clone = clone_die (decl);
8121 if (die->comdat_type_p)
8122 add_AT_die_ref (clone, DW_AT_signature, die);
8123 return clone;
8124 }
8125
8126 clone = new_die_raw (die->die_tag);
8127
8128 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8129 {
8130 /* We don't want to copy over all attributes.
8131 For example we don't want DW_AT_byte_size because otherwise we will no
8132 longer have a declaration and GDB will treat it as a definition. */
8133
8134 switch (a->dw_attr)
8135 {
8136 case DW_AT_abstract_origin:
8137 case DW_AT_artificial:
8138 case DW_AT_containing_type:
8139 case DW_AT_external:
8140 case DW_AT_name:
8141 case DW_AT_type:
8142 case DW_AT_virtuality:
8143 case DW_AT_linkage_name:
8144 case DW_AT_MIPS_linkage_name:
8145 add_dwarf_attr (clone, a);
8146 break;
8147 case DW_AT_byte_size:
8148 case DW_AT_alignment:
8149 default:
8150 break;
8151 }
8152 }
8153
8154 if (die->comdat_type_p)
8155 add_AT_die_ref (clone, DW_AT_signature, die);
8156
8157 add_AT_flag (clone, DW_AT_declaration, 1);
8158 return clone;
8159 }
8160
8161
8162 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8163
8164 struct decl_table_entry
8165 {
8166 dw_die_ref orig;
8167 dw_die_ref copy;
8168 };
8169
8170 /* Helpers to manipulate hash table of copied declarations. */
8171
8172 /* Hashtable helpers. */
8173
8174 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8175 {
8176 typedef die_struct *compare_type;
8177 static inline hashval_t hash (const decl_table_entry *);
8178 static inline bool equal (const decl_table_entry *, const die_struct *);
8179 };
8180
8181 inline hashval_t
8182 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8183 {
8184 return htab_hash_pointer (entry->orig);
8185 }
8186
8187 inline bool
8188 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8189 const die_struct *entry2)
8190 {
8191 return entry1->orig == entry2;
8192 }
8193
8194 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8195
8196 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8197 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8198 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8199 to check if the ancestor has already been copied into UNIT. */
8200
8201 static dw_die_ref
8202 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8203 decl_hash_type *decl_table)
8204 {
8205 dw_die_ref parent = die->die_parent;
8206 dw_die_ref new_parent = unit;
8207 dw_die_ref copy;
8208 decl_table_entry **slot = NULL;
8209 struct decl_table_entry *entry = NULL;
8210
8211 if (decl_table)
8212 {
8213 /* Check if the entry has already been copied to UNIT. */
8214 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8215 INSERT);
8216 if (*slot != HTAB_EMPTY_ENTRY)
8217 {
8218 entry = *slot;
8219 return entry->copy;
8220 }
8221
8222 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8223 entry = XCNEW (struct decl_table_entry);
8224 entry->orig = die;
8225 entry->copy = NULL;
8226 *slot = entry;
8227 }
8228
8229 if (parent != NULL)
8230 {
8231 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8232 if (spec != NULL)
8233 parent = spec;
8234 if (!is_unit_die (parent))
8235 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8236 }
8237
8238 copy = clone_as_declaration (die);
8239 add_child_die (new_parent, copy);
8240
8241 if (decl_table)
8242 {
8243 /* Record the pointer to the copy. */
8244 entry->copy = copy;
8245 }
8246
8247 return copy;
8248 }
8249 /* Copy the declaration context to the new type unit DIE. This includes
8250 any surrounding namespace or type declarations. If the DIE has an
8251 AT_specification attribute, it also includes attributes and children
8252 attached to the specification, and returns a pointer to the original
8253 parent of the declaration DIE. Returns NULL otherwise. */
8254
8255 static dw_die_ref
8256 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8257 {
8258 dw_die_ref decl;
8259 dw_die_ref new_decl;
8260 dw_die_ref orig_parent = NULL;
8261
8262 decl = get_AT_ref (die, DW_AT_specification);
8263 if (decl == NULL)
8264 decl = die;
8265 else
8266 {
8267 unsigned ix;
8268 dw_die_ref c;
8269 dw_attr_node *a;
8270
8271 /* The original DIE will be changed to a declaration, and must
8272 be moved to be a child of the original declaration DIE. */
8273 orig_parent = decl->die_parent;
8274
8275 /* Copy the type node pointer from the new DIE to the original
8276 declaration DIE so we can forward references later. */
8277 decl->comdat_type_p = true;
8278 decl->die_id.die_type_node = die->die_id.die_type_node;
8279
8280 remove_AT (die, DW_AT_specification);
8281
8282 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8283 {
8284 if (a->dw_attr != DW_AT_name
8285 && a->dw_attr != DW_AT_declaration
8286 && a->dw_attr != DW_AT_external)
8287 add_dwarf_attr (die, a);
8288 }
8289
8290 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8291 }
8292
8293 if (decl->die_parent != NULL
8294 && !is_unit_die (decl->die_parent))
8295 {
8296 new_decl = copy_ancestor_tree (unit, decl, NULL);
8297 if (new_decl != NULL)
8298 {
8299 remove_AT (new_decl, DW_AT_signature);
8300 add_AT_specification (die, new_decl);
8301 }
8302 }
8303
8304 return orig_parent;
8305 }
8306
8307 /* Generate the skeleton ancestor tree for the given NODE, then clone
8308 the DIE and add the clone into the tree. */
8309
8310 static void
8311 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8312 {
8313 if (node->new_die != NULL)
8314 return;
8315
8316 node->new_die = clone_as_declaration (node->old_die);
8317
8318 if (node->parent != NULL)
8319 {
8320 generate_skeleton_ancestor_tree (node->parent);
8321 add_child_die (node->parent->new_die, node->new_die);
8322 }
8323 }
8324
8325 /* Generate a skeleton tree of DIEs containing any declarations that are
8326 found in the original tree. We traverse the tree looking for declaration
8327 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8328
8329 static void
8330 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8331 {
8332 skeleton_chain_node node;
8333 dw_die_ref c;
8334 dw_die_ref first;
8335 dw_die_ref prev = NULL;
8336 dw_die_ref next = NULL;
8337
8338 node.parent = parent;
8339
8340 first = c = parent->old_die->die_child;
8341 if (c)
8342 next = c->die_sib;
8343 if (c) do {
8344 if (prev == NULL || prev->die_sib == c)
8345 prev = c;
8346 c = next;
8347 next = (c == first ? NULL : c->die_sib);
8348 node.old_die = c;
8349 node.new_die = NULL;
8350 if (is_declaration_die (c))
8351 {
8352 if (is_template_instantiation (c))
8353 {
8354 /* Instantiated templates do not need to be cloned into the
8355 type unit. Just move the DIE and its children back to
8356 the skeleton tree (in the main CU). */
8357 remove_child_with_prev (c, prev);
8358 add_child_die (parent->new_die, c);
8359 c = prev;
8360 }
8361 else if (c->comdat_type_p)
8362 {
8363 /* This is the skeleton of a type broken out by an earlier
8364 break_out_comdat_types pass. Clone the existing DIE, but keep
8365 the children under the original (which is in the main CU). */
8366 dw_die_ref clone = clone_die (c);
8367
8368 replace_child (c, clone, prev);
8369 generate_skeleton_ancestor_tree (parent);
8370 add_child_die (parent->new_die, c);
8371 c = clone;
8372 continue;
8373 }
8374 else
8375 {
8376 /* Clone the existing DIE, move the original to the skeleton
8377 tree (which is in the main CU), and put the clone, with
8378 all the original's children, where the original came from
8379 (which is about to be moved to the type unit). */
8380 dw_die_ref clone = clone_die (c);
8381 move_all_children (c, clone);
8382
8383 /* If the original has a DW_AT_object_pointer attribute,
8384 it would now point to a child DIE just moved to the
8385 cloned tree, so we need to remove that attribute from
8386 the original. */
8387 remove_AT (c, DW_AT_object_pointer);
8388
8389 replace_child (c, clone, prev);
8390 generate_skeleton_ancestor_tree (parent);
8391 add_child_die (parent->new_die, c);
8392 node.old_die = clone;
8393 node.new_die = c;
8394 c = clone;
8395 }
8396 }
8397 generate_skeleton_bottom_up (&node);
8398 } while (next != NULL);
8399 }
8400
8401 /* Wrapper function for generate_skeleton_bottom_up. */
8402
8403 static dw_die_ref
8404 generate_skeleton (dw_die_ref die)
8405 {
8406 skeleton_chain_node node;
8407
8408 node.old_die = die;
8409 node.new_die = NULL;
8410 node.parent = NULL;
8411
8412 /* If this type definition is nested inside another type,
8413 and is not an instantiation of a template, always leave
8414 at least a declaration in its place. */
8415 if (die->die_parent != NULL
8416 && is_type_die (die->die_parent)
8417 && !is_template_instantiation (die))
8418 node.new_die = clone_as_declaration (die);
8419
8420 generate_skeleton_bottom_up (&node);
8421 return node.new_die;
8422 }
8423
8424 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8425 declaration. The original DIE is moved to a new compile unit so that
8426 existing references to it follow it to the new location. If any of the
8427 original DIE's descendants is a declaration, we need to replace the
8428 original DIE with a skeleton tree and move the declarations back into the
8429 skeleton tree. */
8430
8431 static dw_die_ref
8432 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8433 dw_die_ref prev)
8434 {
8435 dw_die_ref skeleton, orig_parent;
8436
8437 /* Copy the declaration context to the type unit DIE. If the returned
8438 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8439 that DIE. */
8440 orig_parent = copy_declaration_context (unit, child);
8441
8442 skeleton = generate_skeleton (child);
8443 if (skeleton == NULL)
8444 remove_child_with_prev (child, prev);
8445 else
8446 {
8447 skeleton->comdat_type_p = true;
8448 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8449
8450 /* If the original DIE was a specification, we need to put
8451 the skeleton under the parent DIE of the declaration.
8452 This leaves the original declaration in the tree, but
8453 it will be pruned later since there are no longer any
8454 references to it. */
8455 if (orig_parent != NULL)
8456 {
8457 remove_child_with_prev (child, prev);
8458 add_child_die (orig_parent, skeleton);
8459 }
8460 else
8461 replace_child (child, skeleton, prev);
8462 }
8463
8464 return skeleton;
8465 }
8466
8467 static void
8468 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8469 comdat_type_node *type_node,
8470 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8471
8472 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8473 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8474 DWARF procedure references in the DW_AT_location attribute. */
8475
8476 static dw_die_ref
8477 copy_dwarf_procedure (dw_die_ref die,
8478 comdat_type_node *type_node,
8479 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8480 {
8481 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8482
8483 /* DWARF procedures are not supposed to have children... */
8484 gcc_assert (die->die_child == NULL);
8485
8486 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8487 gcc_assert (vec_safe_length (die->die_attr) == 1
8488 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8489
8490 /* Do not copy a DWARF procedure more than once. */
8491 bool existed;
8492 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8493 if (existed)
8494 return die_copy;
8495
8496 die_copy = clone_die (die);
8497 add_child_die (type_node->root_die, die_copy);
8498 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8499 return die_copy;
8500 }
8501
8502 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8503 procedures in DIE's attributes. */
8504
8505 static void
8506 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8507 comdat_type_node *type_node,
8508 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8509 {
8510 dw_attr_node *a;
8511 unsigned i;
8512
8513 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8514 {
8515 dw_loc_descr_ref loc;
8516
8517 if (a->dw_attr_val.val_class != dw_val_class_loc)
8518 continue;
8519
8520 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8521 {
8522 switch (loc->dw_loc_opc)
8523 {
8524 case DW_OP_call2:
8525 case DW_OP_call4:
8526 case DW_OP_call_ref:
8527 gcc_assert (loc->dw_loc_oprnd1.val_class
8528 == dw_val_class_die_ref);
8529 loc->dw_loc_oprnd1.v.val_die_ref.die
8530 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8531 type_node,
8532 copied_dwarf_procs);
8533
8534 default:
8535 break;
8536 }
8537 }
8538 }
8539 }
8540
8541 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8542 rewrite references to point to the copies.
8543
8544 References are looked for in DIE's attributes and recursively in all its
8545 children's attributes that are location descriptions. COPIED_DWARF_PROCS is a
8546 mapping from old DWARF procedures to their copies. It is used to avoid
8547 copying the same DWARF procedure twice under TYPE_NODE. */
8548
8549 static void
8550 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8551 comdat_type_node *type_node,
8552 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8553 {
8554 dw_die_ref c;
8555
8556 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8557 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8558 type_node,
8559 copied_dwarf_procs));
8560 }
8561
8562 /* Traverse the DIE and set up additional .debug_types or .debug_info
8563 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8564 section. */
8565
8566 static void
8567 break_out_comdat_types (dw_die_ref die)
8568 {
8569 dw_die_ref c;
8570 dw_die_ref first;
8571 dw_die_ref prev = NULL;
8572 dw_die_ref next = NULL;
8573 dw_die_ref unit = NULL;
8574
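/* C walks the children while PREV trails one position behind so that C can
   be unlinked or replaced in place; NEXT is captured up front because C may
   be moved into a new type unit during the iteration.  */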
8575 first = c = die->die_child;
8576 if (c)
8577 next = c->die_sib;
8578 if (c) do {
8579 if (prev == NULL || prev->die_sib == c)
8580 prev = c;
8581 c = next;
8582 next = (c == first ? NULL : c->die_sib);
8583 if (should_move_die_to_comdat (c))
8584 {
8585 dw_die_ref replacement;
8586 comdat_type_node *type_node;
8587
8588 /* Break out nested types into their own type units. */
8589 break_out_comdat_types (c);
8590
8591 /* Create a new type unit DIE as the root for the new tree, and
8592 add it to the list of comdat types. */
8593 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8594 add_AT_unsigned (unit, DW_AT_language,
8595 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8596 type_node = ggc_cleared_alloc<comdat_type_node> ();
8597 type_node->root_die = unit;
8598 type_node->next = comdat_type_list;
8599 comdat_type_list = type_node;
8600
8601 /* Generate the type signature. */
8602 generate_type_signature (c, type_node);
8603
8604 /* Copy the declaration context, attributes, and children of the
8605 declaration into the new type unit DIE, then remove this DIE
8606 from the main CU (or replace it with a skeleton if necessary). */
8607 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8608 type_node->skeleton_die = replacement;
8609
8610 /* Add the DIE to the new compunit. */
8611 add_child_die (unit, c);
8612
8613 /* Types can reference DWARF procedures for type size or data location
8614 expressions. Calls in DWARF expressions cannot target procedures
8615 that are not in the same section. So we must copy DWARF procedures
8616 along with this type and then rewrite references to them. */
8617 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8618 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8619
8620 if (replacement != NULL)
8621 c = replacement;
8622 }
8623 else if (c->die_tag == DW_TAG_namespace
8624 || c->die_tag == DW_TAG_class_type
8625 || c->die_tag == DW_TAG_structure_type
8626 || c->die_tag == DW_TAG_union_type)
8627 {
8628 /* Look for nested types that can be broken out. */
8629 break_out_comdat_types (c);
8630 }
8631 } while (next != NULL);
8632 }
8633
8634 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8635 Enter all the cloned children into the hash table decl_table. */
8636
8637 static dw_die_ref
8638 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8639 {
8640 dw_die_ref c;
8641 dw_die_ref clone;
8642 struct decl_table_entry *entry;
8643 decl_table_entry **slot;
8644
8645 if (die->die_tag == DW_TAG_subprogram)
8646 clone = clone_as_declaration (die);
8647 else
8648 clone = clone_die (die);
8649
8650 slot = decl_table->find_slot_with_hash (die,
8651 htab_hash_pointer (die), INSERT);
8652
8653 /* Assert that DIE isn't in the hash table yet. If it were already
8654 there, its ancestors would necessarily be there as well, and
8655 clone_tree_partial wouldn't have been called for it. */
8656 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8657
8658 entry = XCNEW (struct decl_table_entry);
8659 entry->orig = die;
8660 entry->copy = clone;
8661 *slot = entry;
8662
8663 if (die->die_tag != DW_TAG_subprogram)
8664 FOR_EACH_CHILD (die, c,
8665 add_child_die (clone, clone_tree_partial (c, decl_table)));
8666
8667 return clone;
8668 }
8669
8670 /* Walk the DIE and its children, looking for references to incomplete
8671 or trivial types that are unmarked (i.e., that are not in the current
8672 type_unit). */
8673
8674 static void
8675 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8676 {
8677 dw_die_ref c;
8678 dw_attr_node *a;
8679 unsigned ix;
8680
8681 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8682 {
8683 if (AT_class (a) == dw_val_class_die_ref)
8684 {
8685 dw_die_ref targ = AT_ref (a);
8686 decl_table_entry **slot;
8687 struct decl_table_entry *entry;
8688
8689 if (targ->die_mark != 0 || targ->comdat_type_p)
8690 continue;
8691
8692 slot = decl_table->find_slot_with_hash (targ,
8693 htab_hash_pointer (targ),
8694 INSERT);
8695
8696 if (*slot != HTAB_EMPTY_ENTRY)
8697 {
8698 /* TARG has already been copied, so we just need to
8699 modify the reference to point to the copy. */
8700 entry = *slot;
8701 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8702 }
8703 else
8704 {
8705 dw_die_ref parent = unit;
8706 dw_die_ref copy = clone_die (targ);
8707
8708 /* Record in DECL_TABLE that TARG has been copied.
8709 Need to do this now, before the recursive call,
8710 because DECL_TABLE may be expanded and SLOT
8711 would no longer be a valid pointer. */
8712 entry = XCNEW (struct decl_table_entry);
8713 entry->orig = targ;
8714 entry->copy = copy;
8715 *slot = entry;
8716
8717 /* If TARG is not a declaration DIE, we need to copy its
8718 children. */
8719 if (!is_declaration_die (targ))
8720 {
8721 FOR_EACH_CHILD (
8722 targ, c,
8723 add_child_die (copy,
8724 clone_tree_partial (c, decl_table)));
8725 }
8726
8727 /* Make sure the cloned tree is marked as part of the
8728 type unit. */
8729 mark_dies (copy);
8730
8731 /* If TARG has surrounding context, copy its ancestor tree
8732 into the new type unit. */
8733 if (targ->die_parent != NULL
8734 && !is_unit_die (targ->die_parent))
8735 parent = copy_ancestor_tree (unit, targ->die_parent,
8736 decl_table);
8737
8738 add_child_die (parent, copy);
8739 a->dw_attr_val.v.val_die_ref.die = copy;
8740
8741 /* Make sure the newly-copied DIE is walked. If it was
8742 installed in a previously-added context, it won't
8743 get visited otherwise. */
8744 if (parent != unit)
8745 {
8746 /* Find the highest point of the newly-added tree,
8747 mark each node along the way, and walk from there. */
8748 parent->die_mark = 1;
8749 while (parent->die_parent
8750 && parent->die_parent->die_mark == 0)
8751 {
8752 parent = parent->die_parent;
8753 parent->die_mark = 1;
8754 }
8755 copy_decls_walk (unit, parent, decl_table);
8756 }
8757 }
8758 }
8759 }
8760
8761 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8762 }
8763
8764 /* Copy declarations for "unworthy" types into the new comdat section.
8765 Incomplete types, modified types, and certain other types aren't broken
8766 out into comdat sections of their own, so they don't have a signature,
8767 and we need to copy the declaration into the same section so that we
8768 don't have an external reference. */
8769
8770 static void
8771 copy_decls_for_unworthy_types (dw_die_ref unit)
8772 {
8773 mark_dies (unit);
8774 decl_hash_type decl_table (10);
8775 copy_decls_walk (unit, unit, &decl_table);
8776 unmark_dies (unit);
8777 }
8778
8779 /* Traverse the DIE and add a sibling attribute if it may have the
8780 effect of speeding up access to siblings. To save some space,
8781 avoid generating sibling attributes for DIE's without children. */
8782
8783 static void
8784 add_sibling_attributes (dw_die_ref die)
8785 {
8786 dw_die_ref c;
8787
8788 if (! die->die_child)
8789 return;
8790
8791 if (die->die_parent && die != die->die_parent->die_child)
8792 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8793
8794 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8795 }
8796
8797 /* Output all location lists for the DIE and its children. */
8798
8799 static void
8800 output_location_lists (dw_die_ref die)
8801 {
8802 dw_die_ref c;
8803 dw_attr_node *a;
8804 unsigned ix;
8805
8806 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8807 if (AT_class (a) == dw_val_class_loc_list)
8808 output_loc_list (AT_loc_list (a));
8809
8810 FOR_EACH_CHILD (die, c, output_location_lists (c));
8811 }
8812
8813 /* During assign_location_list_indexes and output_loclists_offsets this
8814 holds the current index; afterwards it holds the number of assigned
8815 indexes (i.e. how large the .debug_loclists* offset table should be). */
8816 static unsigned int loc_list_idx;
8817
8818 /* Output all location list offsets for the DIE and its children. */
8819
8820 static void
8821 output_loclists_offsets (dw_die_ref die)
8822 {
8823 dw_die_ref c;
8824 dw_attr_node *a;
8825 unsigned ix;
8826
8827 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8828 if (AT_class (a) == dw_val_class_loc_list)
8829 {
8830 dw_loc_list_ref l = AT_loc_list (a);
8831 if (l->offset_emitted)
8832 continue;
8833 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8834 loc_section_label, NULL);
8835 gcc_assert (l->hash == loc_list_idx);
8836 loc_list_idx++;
8837 l->offset_emitted = true;
8838 }
8839
8840 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8841 }
8842
8843 /* Recursively set indexes of location lists. */
8844
8845 static void
8846 assign_location_list_indexes (dw_die_ref die)
8847 {
8848 dw_die_ref c;
8849 dw_attr_node *a;
8850 unsigned ix;
8851
8852 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8853 if (AT_class (a) == dw_val_class_loc_list)
8854 {
8855 dw_loc_list_ref list = AT_loc_list (a);
8856 if (!list->num_assigned)
8857 {
8858 list->num_assigned = true;
8859 list->hash = loc_list_idx++;
8860 }
8861 }
8862
8863 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8864 }
8865
8866 /* We want to limit the number of external references, because they are
8867 larger than local references: a relocation takes multiple words, and
8868 even a sig8 reference is always eight bytes, whereas a local reference
8869 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8870 So if we encounter multiple external references to the same type DIE, we
8871 make a local typedef stub for it and redirect all references there.
8872
8873 This is the element of the hash table for keeping track of these
8874 references. */
8875
8876 struct external_ref
8877 {
8878 dw_die_ref type;
8879 dw_die_ref stub;
8880 unsigned n_refs;
8881 };
8882
8883 /* Hashtable helpers. */
8884
8885 struct external_ref_hasher : free_ptr_hash <external_ref>
8886 {
8887 static inline hashval_t hash (const external_ref *);
8888 static inline bool equal (const external_ref *, const external_ref *);
8889 };
8890
8891 inline hashval_t
8892 external_ref_hasher::hash (const external_ref *r)
8893 {
8894 dw_die_ref die = r->type;
8895 hashval_t h = 0;
8896
8897 /* We can't use the address of the DIE for hashing, because
8898 that will make the order of the stub DIEs non-deterministic. */
8899 if (! die->comdat_type_p)
8900 /* We have a symbol; use it to compute a hash. */
8901 h = htab_hash_string (die->die_id.die_symbol);
8902 else
8903 {
8904 /* We have a type signature; use a subset of the bits as the hash.
8905 The 8-byte signature is at least as large as hashval_t. */
8906 comdat_type_node *type_node = die->die_id.die_type_node;
8907 memcpy (&h, type_node->signature, sizeof (h));
8908 }
8909 return h;
8910 }
8911
8912 inline bool
8913 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8914 {
8915 return r1->type == r2->type;
8916 }
8917
8918 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8919
8920 /* Return a pointer to the external_ref for references to DIE. */
8921
8922 static struct external_ref *
8923 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8924 {
8925 struct external_ref ref, *ref_p;
8926 external_ref **slot;
8927
8928 ref.type = die;
8929 slot = map->find_slot (&ref, INSERT);
8930 if (*slot != HTAB_EMPTY_ENTRY)
8931 return *slot;
8932
8933 ref_p = XCNEW (struct external_ref);
8934 ref_p->type = die;
8935 *slot = ref_p;
8936 return ref_p;
8937 }
8938
8939 /* Subroutine of optimize_external_refs, below.
8940
8941 If we see a type skeleton, record it as our stub. If we see external
8942 references, remember how many we've seen. */
8943
8944 static void
8945 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8946 {
8947 dw_die_ref c;
8948 dw_attr_node *a;
8949 unsigned ix;
8950 struct external_ref *ref_p;
8951
8952 if (is_type_die (die)
8953 && (c = get_AT_ref (die, DW_AT_signature)))
8954 {
8955 /* This is a local skeleton; use it for local references. */
8956 ref_p = lookup_external_ref (map, c);
8957 ref_p->stub = die;
8958 }
8959
8960 /* Scan the DIE references, and remember any that refer to DIEs from
8961 other CUs (i.e. those which are not marked). */
8962 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8963 if (AT_class (a) == dw_val_class_die_ref
8964 && (c = AT_ref (a))->die_mark == 0
8965 && is_type_die (c))
8966 {
8967 ref_p = lookup_external_ref (map, c);
8968 ref_p->n_refs++;
8969 }
8970
8971 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8972 }
8973
8974 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8975 points to an external_ref, DATA is the CU we're processing. If we don't
8976 already have a local stub, and we have multiple refs, build a stub. */
8977
8978 int
8979 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8980 {
8981 struct external_ref *ref_p = *slot;
8982
8983 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8984 {
8985 /* We have multiple references to this type, so build a small stub.
8986 Both of these forms are a bit dodgy from the perspective of the
8987 DWARF standard, since technically they should have names. */
8988 dw_die_ref cu = data;
8989 dw_die_ref type = ref_p->type;
8990 dw_die_ref stub = NULL;
8991
8992 if (type->comdat_type_p)
8993 {
8994 /* If we refer to this type via sig8, use AT_signature. */
8995 stub = new_die (type->die_tag, cu, NULL_TREE);
8996 add_AT_die_ref (stub, DW_AT_signature, type);
8997 }
8998 else
8999 {
9000 /* Otherwise, use a typedef with no name. */
9001 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
9002 add_AT_die_ref (stub, DW_AT_type, type);
9003 }
9004
9005 stub->die_mark++;
9006 ref_p->stub = stub;
9007 }
9008 return 1;
9009 }
9010
9011 /* DIE is a unit; look through all the DIE references to see if there are
9012 any external references to types, and if so, create local stubs for
9013 them which will be applied in build_abbrev_table. This is useful because
9014 references to local DIEs are smaller. */
9015
9016 static external_ref_hash_type *
9017 optimize_external_refs (dw_die_ref die)
9018 {
9019 external_ref_hash_type *map = new external_ref_hash_type (10);
9020 optimize_external_refs_1 (die, map);
9021 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9022 return map;
9023 }
9024
9025 /* The following 4 variables are temporaries that are computed only during the
9026 build_abbrev_table call and used and released during the following
9027 optimize_abbrev_table call. */
9028
9029 /* First abbrev_id that can be optimized based on usage. */
9030 static unsigned int abbrev_opt_start;
9031
9032 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9033 abbrev_id smaller than this, because they must be already sized
9034 during build_abbrev_table). */
9035 static unsigned int abbrev_opt_base_type_end;
9036
9037 /* Vector of usage counts during build_abbrev_table. Indexed by
9038 abbrev_id - abbrev_opt_start. */
9039 static vec<unsigned int> abbrev_usage_count;
9040
9041 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9042 static vec<dw_die_ref> sorted_abbrev_dies;
9043
9044 /* The format of each DIE (and its attribute value pairs) is encoded in an
9045 abbreviation table. This routine builds the abbreviation table and assigns
9046 a unique abbreviation id for each abbreviation entry. The children of each
9047 die are visited recursively. */
9048
9049 static void
9050 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9051 {
9052 unsigned int abbrev_id = 0;
9053 dw_die_ref c;
9054 dw_attr_node *a;
9055 unsigned ix;
9056 dw_die_ref abbrev;
9057
9058 /* Scan the DIE references, and replace any that refer to
9059 DIEs from other CUs (i.e. those which are not marked) with
9060 the local stubs we built in optimize_external_refs. */
9061 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9062 if (AT_class (a) == dw_val_class_die_ref
9063 && (c = AT_ref (a))->die_mark == 0)
9064 {
9065 struct external_ref *ref_p;
9066 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9067
9068 ref_p = lookup_external_ref (extern_map, c);
9069 if (ref_p->stub && ref_p->stub != die)
9070 change_AT_die_ref (a, ref_p->stub);
9071 else
9072 /* We aren't changing this reference, so mark it external. */
9073 set_AT_ref_external (a, 1);
9074 }
9075
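/* Now look for an existing abbreviation that matches this DIE's tag,
   children flag and attribute/form list; if none matches, a new entry is
   pushed below.  */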
9076 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9077 {
9078 dw_attr_node *die_a, *abbrev_a;
9079 unsigned ix;
9080 bool ok = true;
9081
9082 if (abbrev_id == 0)
9083 continue;
9084 if (abbrev->die_tag != die->die_tag)
9085 continue;
9086 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9087 continue;
9088
9089 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9090 continue;
9091
9092 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9093 {
9094 abbrev_a = &(*abbrev->die_attr)[ix];
9095 if ((abbrev_a->dw_attr != die_a->dw_attr)
9096 || (value_format (abbrev_a) != value_format (die_a)))
9097 {
9098 ok = false;
9099 break;
9100 }
9101 }
9102 if (ok)
9103 break;
9104 }
9105
9106 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9107 {
9108 vec_safe_push (abbrev_die_table, die);
9109 if (abbrev_opt_start)
9110 abbrev_usage_count.safe_push (0);
9111 }
9112 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9113 {
9114 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9115 sorted_abbrev_dies.safe_push (die);
9116 }
9117
9118 die->die_abbrev = abbrev_id;
9119 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9120 }
9121
9122 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9123 by die_abbrev's usage count, from the most commonly used
9124 abbreviation to the least. */
9125
9126 static int
9127 die_abbrev_cmp (const void *p1, const void *p2)
9128 {
9129 dw_die_ref die1 = *(const dw_die_ref *) p1;
9130 dw_die_ref die2 = *(const dw_die_ref *) p2;
9131
9132 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9133 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9134
9135 if (die1->die_abbrev >= abbrev_opt_base_type_end
9136 && die2->die_abbrev >= abbrev_opt_base_type_end)
9137 {
9138 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9139 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9140 return -1;
9141 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9142 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9143 return 1;
9144 }
9145
9146 /* Stabilize the sort. */
9147 if (die1->die_abbrev < die2->die_abbrev)
9148 return -1;
9149 if (die1->die_abbrev > die2->die_abbrev)
9150 return 1;
9151
9152 return 0;
9153 }
9154
9155 /* Convert dw_val_class_const, dw_val_class_unsigned_const and
9156 dw_val_class_file attributes of the DIEs between sorted_abbrev_dies[first_id]
9157 and sorted_abbrev_dies[end - 1] into dw_val_class_const_implicit,
9158 dw_val_class_unsigned_const_implicit or dw_val_class_file_implicit. */
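/* E.g. if every DIE sharing an abbreviation has DW_AT_decl_file 1, the
   value 1 can be recorded once in .debug_abbrev via DW_FORM_implicit_const
   and dropped from each DIE in .debug_info.  */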
9159
9160 static void
9161 optimize_implicit_const (unsigned int first_id, unsigned int end,
9162 vec<bool> &implicit_consts)
9163 {
9164 /* It never makes sense if there is just one DIE using the abbreviation. */
9165 if (end < first_id + 2)
9166 return;
9167
9168 dw_attr_node *a;
9169 unsigned ix, i;
9170 dw_die_ref die = sorted_abbrev_dies[first_id];
9171 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9172 if (implicit_consts[ix])
9173 {
9174 enum dw_val_class new_class = dw_val_class_none;
9175 switch (AT_class (a))
9176 {
9177 case dw_val_class_unsigned_const:
9178 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9179 continue;
9180
9181 /* Converting costs size_of_sleb128 (AT_unsigned (a)) bytes of growth in
9182 .debug_abbrev, while it saves the constant's bytes in every DIE using
9183 that abbreviation; only convert when that is a net win. */
9184 if (constant_size (AT_unsigned (a)) * (end - first_id)
9185 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9186 continue;
9187
9188 new_class = dw_val_class_unsigned_const_implicit;
9189 break;
9190
9191 case dw_val_class_const:
9192 new_class = dw_val_class_const_implicit;
9193 break;
9194
9195 case dw_val_class_file:
9196 new_class = dw_val_class_file_implicit;
9197 break;
9198
9199 default:
9200 continue;
9201 }
9202 for (i = first_id; i < end; i++)
9203 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9204 = new_class;
9205 }
9206 }
9207
9208 /* Attempt to optimize the abbreviation table, considering only abbreviations
9209 with ids of abbrev_opt_start and above. */
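/* Abbreviation codes are emitted as uleb128 at the start of every DIE, so
   giving the most frequently used abbreviations the smallest codes keeps
   those uleb128s (and thus .debug_info) as short as possible.  */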
9210
9211 static void
9212 optimize_abbrev_table (void)
9213 {
9214 if (abbrev_opt_start
9215 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9216 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9217 {
9218 auto_vec<bool, 32> implicit_consts;
9219 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9220
9221 unsigned int abbrev_id = abbrev_opt_start - 1;
9222 unsigned int first_id = ~0U;
9223 unsigned int last_abbrev_id = 0;
9224 unsigned int i;
9225 dw_die_ref die;
9226 if (abbrev_opt_base_type_end > abbrev_opt_start)
9227 abbrev_id = abbrev_opt_base_type_end - 1;
9228 /* Reassign abbreviation ids from abbrev_opt_start upwards, so that the
9229 most commonly used abbreviations come first. */
9230 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9231 {
9232 dw_attr_node *a;
9233 unsigned ix;
9234
9235 /* If calc_base_type_die_sizes has been called, the CU and
9236 base types after it can't be optimized, because we've already
9237 calculated their DIE offsets. We've sorted them first. */
9238 if (die->die_abbrev < abbrev_opt_base_type_end)
9239 continue;
9240 if (die->die_abbrev != last_abbrev_id)
9241 {
9242 last_abbrev_id = die->die_abbrev;
9243 if (dwarf_version >= 5 && first_id != ~0U)
9244 optimize_implicit_const (first_id, i, implicit_consts);
9245 abbrev_id++;
9246 (*abbrev_die_table)[abbrev_id] = die;
9247 if (dwarf_version >= 5)
9248 {
9249 first_id = i;
9250 implicit_consts.truncate (0);
9251
9252 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9253 switch (AT_class (a))
9254 {
9255 case dw_val_class_const:
9256 case dw_val_class_unsigned_const:
9257 case dw_val_class_file:
9258 implicit_consts.safe_push (true);
9259 break;
9260 default:
9261 implicit_consts.safe_push (false);
9262 break;
9263 }
9264 }
9265 }
9266 else if (dwarf_version >= 5)
9267 {
9268 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9269 if (!implicit_consts[ix])
9270 continue;
9271 else
9272 {
9273 dw_attr_node *other_a
9274 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9275 if (!dw_val_equal_p (&a->dw_attr_val,
9276 &other_a->dw_attr_val))
9277 implicit_consts[ix] = false;
9278 }
9279 }
9280 die->die_abbrev = abbrev_id;
9281 }
9282 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9283 if (dwarf_version >= 5 && first_id != ~0U)
9284 optimize_implicit_const (first_id, i, implicit_consts);
9285 }
9286
9287 abbrev_opt_start = 0;
9288 abbrev_opt_base_type_end = 0;
9289 abbrev_usage_count.release ();
9290 sorted_abbrev_dies.release ();
9291 }
9292 \f
9293 /* Return the power-of-two number of bytes necessary to represent VALUE. */
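/* A sketch of the mapping, assuming GCC's floor_log2 (0) == -1:
   constant_size (0) == 1, constant_size (0xff) == 1,
   constant_size (0x100) == 2, constant_size (0xffff) == 2,
   constant_size (0x10000) == 4, constant_size (1ULL << 32) == 8.  */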
9294
9295 static int
9296 constant_size (unsigned HOST_WIDE_INT value)
9297 {
9298 int log;
9299
9300 if (value == 0)
9301 log = 0;
9302 else
9303 log = floor_log2 (value);
9304
9305 log = log / 8;
9306 log = 1 << (floor_log2 (log) + 1);
9307
9308 return log;
9309 }
9310
9311 /* Return the size of a DIE as it is represented in the
9312 .debug_info section. */
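/* The size computed here must agree exactly with what output_die later
   emits, since the die_offset values derived from it are used for
   intra-CU references and for the CU length field.  */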
9313
9314 static unsigned long
9315 size_of_die (dw_die_ref die)
9316 {
9317 unsigned long size = 0;
9318 dw_attr_node *a;
9319 unsigned ix;
9320 enum dwarf_form form;
9321
9322 size += size_of_uleb128 (die->die_abbrev);
9323 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9324 {
9325 switch (AT_class (a))
9326 {
9327 case dw_val_class_addr:
9328 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9329 {
9330 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9331 size += size_of_uleb128 (AT_index (a));
9332 }
9333 else
9334 size += DWARF2_ADDR_SIZE;
9335 break;
9336 case dw_val_class_offset:
9337 size += DWARF_OFFSET_SIZE;
9338 break;
9339 case dw_val_class_loc:
9340 {
9341 unsigned long lsize = size_of_locs (AT_loc (a));
9342
9343 /* Block length. */
9344 if (dwarf_version >= 4)
9345 size += size_of_uleb128 (lsize);
9346 else
9347 size += constant_size (lsize);
9348 size += lsize;
9349 }
9350 break;
9351 case dw_val_class_loc_list:
9352 case dw_val_class_view_list:
9353 if (dwarf_split_debug_info && dwarf_version >= 5)
9354 {
9355 gcc_assert (AT_loc_list (a)->num_assigned);
9356 size += size_of_uleb128 (AT_loc_list (a)->hash);
9357 }
9358 else
9359 size += DWARF_OFFSET_SIZE;
9360 break;
9361 case dw_val_class_range_list:
9362 if (value_format (a) == DW_FORM_rnglistx)
9363 {
9364 gcc_assert (rnglist_idx);
9365 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9366 size += size_of_uleb128 (r->idx);
9367 }
9368 else
9369 size += DWARF_OFFSET_SIZE;
9370 break;
9371 case dw_val_class_const:
9372 size += size_of_sleb128 (AT_int (a));
9373 break;
9374 case dw_val_class_unsigned_const:
9375 {
9376 int csize = constant_size (AT_unsigned (a));
9377 if (dwarf_version == 3
9378 && a->dw_attr == DW_AT_data_member_location
9379 && csize >= 4)
9380 size += size_of_uleb128 (AT_unsigned (a));
9381 else
9382 size += csize;
9383 }
9384 break;
9385 case dw_val_class_symview:
9386 if (symview_upper_bound <= 0xff)
9387 size += 1;
9388 else if (symview_upper_bound <= 0xffff)
9389 size += 2;
9390 else if (symview_upper_bound <= 0xffffffff)
9391 size += 4;
9392 else
9393 size += 8;
9394 break;
9395 case dw_val_class_const_implicit:
9396 case dw_val_class_unsigned_const_implicit:
9397 case dw_val_class_file_implicit:
9398 /* These occupy no size in the DIE, just an extra sleb128 in
9399 .debug_abbrev. */
9400 break;
9401 case dw_val_class_const_double:
9402 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9403 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9404 size++; /* block */
9405 break;
9406 case dw_val_class_wide_int:
9407 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9408 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9409 if (get_full_len (*a->dw_attr_val.v.val_wide)
9410 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9411 size++; /* block */
9412 break;
9413 case dw_val_class_vec:
9414 size += constant_size (a->dw_attr_val.v.val_vec.length
9415 * a->dw_attr_val.v.val_vec.elt_size)
9416 + a->dw_attr_val.v.val_vec.length
9417 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9418 break;
9419 case dw_val_class_flag:
9420 if (dwarf_version >= 4)
9421 /* Currently all add_AT_flag calls pass in 1 as last argument,
9422 so DW_FORM_flag_present can be used. If that ever changes,
9423 we'll need to use DW_FORM_flag and have some optimization
9424 in build_abbrev_table that will change those to
9425 DW_FORM_flag_present if it is set to 1 in all DIEs using
9426 the same abbrev entry. */
9427 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9428 else
9429 size += 1;
9430 break;
9431 case dw_val_class_die_ref:
9432 if (AT_ref_external (a))
9433 {
9434 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9435 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9436 is sized by target address length, whereas in DWARF3
9437 it's always sized as an offset. */
9438 if (use_debug_types)
9439 size += DWARF_TYPE_SIGNATURE_SIZE;
9440 else if (dwarf_version == 2)
9441 size += DWARF2_ADDR_SIZE;
9442 else
9443 size += DWARF_OFFSET_SIZE;
9444 }
9445 else
9446 size += DWARF_OFFSET_SIZE;
9447 break;
9448 case dw_val_class_fde_ref:
9449 size += DWARF_OFFSET_SIZE;
9450 break;
9451 case dw_val_class_lbl_id:
9452 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9453 {
9454 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9455 size += size_of_uleb128 (AT_index (a));
9456 }
9457 else
9458 size += DWARF2_ADDR_SIZE;
9459 break;
9460 case dw_val_class_lineptr:
9461 case dw_val_class_macptr:
9462 case dw_val_class_loclistsptr:
9463 size += DWARF_OFFSET_SIZE;
9464 break;
9465 case dw_val_class_str:
9466 form = AT_string_form (a);
9467 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9468 size += DWARF_OFFSET_SIZE;
9469 else if (form == dwarf_FORM (DW_FORM_strx))
9470 size += size_of_uleb128 (AT_index (a));
9471 else
9472 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9473 break;
9474 case dw_val_class_file:
9475 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9476 break;
9477 case dw_val_class_data8:
9478 size += 8;
9479 break;
9480 case dw_val_class_vms_delta:
9481 size += DWARF_OFFSET_SIZE;
9482 break;
9483 case dw_val_class_high_pc:
9484 size += DWARF2_ADDR_SIZE;
9485 break;
9486 case dw_val_class_discr_value:
9487 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9488 break;
9489 case dw_val_class_discr_list:
9490 {
9491 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9492
9493 /* This is a block, so we have the block length and then its
9494 data. */
9495 size += constant_size (block_size) + block_size;
9496 }
9497 break;
9498 default:
9499 gcc_unreachable ();
9500 }
9501 }
9502
9503 return size;
9504 }
9505
9506 /* Size the debugging information associated with a given DIE. Visits the
9507 DIE's children recursively. Updates the global variable next_die_offset
9508 each time through. Uses the current value of next_die_offset to update the
9509 die_offset field in each DIE. */
9510
9511 static void
9512 calc_die_sizes (dw_die_ref die)
9513 {
9514 dw_die_ref c;
9515
9516 gcc_assert (die->die_offset == 0
9517 || (unsigned long int) die->die_offset == next_die_offset);
9518 die->die_offset = next_die_offset;
9519 next_die_offset += size_of_die (die);
9520
9521 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9522
9523 if (die->die_child != NULL)
9524 /* Count the null byte used to terminate sibling lists. */
9525 next_die_offset += 1;
9526 }
9527
9528 /* Size just the base type children at the start of the CU.
9529 This is needed because build_abbrev_table needs to size locs,
9530 and sizing of type-based stack ops needs to know die_offset
9531 values for the base types. */
9532
9533 static void
9534 calc_base_type_die_sizes (void)
9535 {
9536 unsigned long die_offset = (dwarf_split_debug_info
9537 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9538 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9539 unsigned int i;
9540 dw_die_ref base_type;
9541 #if ENABLE_ASSERT_CHECKING
9542 dw_die_ref prev = comp_unit_die ()->die_child;
9543 #endif
9544
9545 die_offset += size_of_die (comp_unit_die ());
9546 for (i = 0; base_types.iterate (i, &base_type); i++)
9547 {
9548 #if ENABLE_ASSERT_CHECKING
9549 gcc_assert (base_type->die_offset == 0
9550 && prev->die_sib == base_type
9551 && base_type->die_child == NULL
9552 && base_type->die_abbrev);
9553 prev = base_type;
9554 #endif
9555 if (abbrev_opt_start
9556 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9557 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9558 base_type->die_offset = die_offset;
9559 die_offset += size_of_die (base_type);
9560 }
9561 }
9562
9563 /* Set the marks for a die and its children. We do this so
9564 that we know whether or not a reference needs to use FORM_ref_addr; only
9565 DIEs in the same CU will be marked. We used to clear out the offset
9566 and use that as the flag, but ran into ordering problems. */
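/* (Elsewhere, a die_mark of zero is what build_abbrev_table and
   optimize_external_refs_1 treat as "defined outside the current CU".)  */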
9567
9568 static void
9569 mark_dies (dw_die_ref die)
9570 {
9571 dw_die_ref c;
9572
9573 gcc_assert (!die->die_mark);
9574
9575 die->die_mark = 1;
9576 FOR_EACH_CHILD (die, c, mark_dies (c));
9577 }
9578
9579 /* Clear the marks for a die and its children. */
9580
9581 static void
9582 unmark_dies (dw_die_ref die)
9583 {
9584 dw_die_ref c;
9585
9586 if (! use_debug_types)
9587 gcc_assert (die->die_mark);
9588
9589 die->die_mark = 0;
9590 FOR_EACH_CHILD (die, c, unmark_dies (c));
9591 }
9592
9593 /* Clear the marks for a die, its children and referred dies. */
9594
9595 static void
9596 unmark_all_dies (dw_die_ref die)
9597 {
9598 dw_die_ref c;
9599 dw_attr_node *a;
9600 unsigned ix;
9601
9602 if (!die->die_mark)
9603 return;
9604 die->die_mark = 0;
9605
9606 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9607
9608 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9609 if (AT_class (a) == dw_val_class_die_ref)
9610 unmark_all_dies (AT_ref (a));
9611 }
9612
9613 /* Calculate if the entry should appear in the final output file. It may be
9614 from a pruned type. */
9615
9616 static bool
9617 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9618 {
9619 /* By limiting gnu pubnames to definitions only, gold can generate a
9620 gdb index without entries for declarations, which don't include
9621 enough information to be useful. */
9622 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9623 return false;
9624
9625 if (table == pubname_table)
9626 {
9627 /* Enumerator names are part of the pubname table, but the
9628 parent DW_TAG_enumeration_type die may have been pruned.
9629 Don't output them if that is the case. */
9630 if (p->die->die_tag == DW_TAG_enumerator &&
9631 (p->die->die_parent == NULL
9632 || !p->die->die_parent->die_perennial_p))
9633 return false;
9634
9635 /* Everything else in the pubname table is included. */
9636 return true;
9637 }
9638
9639 /* The pubtypes table shouldn't include types that have been
9640 pruned. */
9641 return (p->die->die_offset != 0
9642 || !flag_eliminate_unused_debug_types);
9643 }
9644
9645 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9646 generated for the compilation unit. */
9647
9648 static unsigned long
9649 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9650 {
9651 unsigned long size;
9652 unsigned i;
9653 pubname_entry *p;
9654 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9655
9656 size = DWARF_PUBNAMES_HEADER_SIZE;
9657 FOR_EACH_VEC_ELT (*names, i, p)
9658 if (include_pubname_in_output (names, p))
9659 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9660
9661 size += DWARF_OFFSET_SIZE;
9662 return size;
9663 }
9664
9665 /* Return the size of the information in the .debug_aranges section. */
9666
9667 static unsigned long
9668 size_of_aranges (void)
9669 {
9670 unsigned long size;
9671
9672 size = DWARF_ARANGES_HEADER_SIZE;
9673
9674 /* Count the address/length pair for this compilation unit. */
9675 if (text_section_used)
9676 size += 2 * DWARF2_ADDR_SIZE;
9677 if (cold_text_section_used)
9678 size += 2 * DWARF2_ADDR_SIZE;
9679 if (have_multiple_function_sections)
9680 {
9681 unsigned fde_idx;
9682 dw_fde_ref fde;
9683
9684 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9685 {
9686 if (DECL_IGNORED_P (fde->decl))
9687 continue;
9688 if (!fde->in_std_section)
9689 size += 2 * DWARF2_ADDR_SIZE;
9690 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9691 size += 2 * DWARF2_ADDR_SIZE;
9692 }
9693 }
9694
9695 /* Count the two zero words used to terminate the address range table. */
9696 size += 2 * DWARF2_ADDR_SIZE;
9697 return size;
9698 }
9699 \f
9700 /* Select the encoding of an attribute value. */
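/* The form chosen here must stay consistent with the sizes assumed in
   size_of_die and with the bytes emitted in output_die for the same
   attribute class.  */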
9701
9702 static enum dwarf_form
9703 value_format (dw_attr_node *a)
9704 {
9705 switch (AT_class (a))
9706 {
9707 case dw_val_class_addr:
9708 /* Only very few attributes allow DW_FORM_addr. */
9709 switch (a->dw_attr)
9710 {
9711 case DW_AT_low_pc:
9712 case DW_AT_high_pc:
9713 case DW_AT_entry_pc:
9714 case DW_AT_trampoline:
9715 return (AT_index (a) == NOT_INDEXED
9716 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9717 default:
9718 break;
9719 }
9720 switch (DWARF2_ADDR_SIZE)
9721 {
9722 case 1:
9723 return DW_FORM_data1;
9724 case 2:
9725 return DW_FORM_data2;
9726 case 4:
9727 return DW_FORM_data4;
9728 case 8:
9729 return DW_FORM_data8;
9730 default:
9731 gcc_unreachable ();
9732 }
9733 case dw_val_class_loc_list:
9734 case dw_val_class_view_list:
9735 if (dwarf_split_debug_info
9736 && dwarf_version >= 5
9737 && AT_loc_list (a)->num_assigned)
9738 return DW_FORM_loclistx;
9739 /* FALLTHRU */
9740 case dw_val_class_range_list:
9741 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo,
9742 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9743 care about the sizes of .debug* sections in shared libraries and
9744 executables and don't take into account relocations that affect only
9745 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9746 table in the .debug_rnglists section. */
9747 if (dwarf_split_debug_info
9748 && dwarf_version >= 5
9749 && AT_class (a) == dw_val_class_range_list
9750 && rnglist_idx
9751 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9752 return DW_FORM_rnglistx;
9753 if (dwarf_version >= 4)
9754 return DW_FORM_sec_offset;
9755 /* FALLTHRU */
9756 case dw_val_class_vms_delta:
9757 case dw_val_class_offset:
9758 switch (DWARF_OFFSET_SIZE)
9759 {
9760 case 4:
9761 return DW_FORM_data4;
9762 case 8:
9763 return DW_FORM_data8;
9764 default:
9765 gcc_unreachable ();
9766 }
9767 case dw_val_class_loc:
9768 if (dwarf_version >= 4)
9769 return DW_FORM_exprloc;
9770 switch (constant_size (size_of_locs (AT_loc (a))))
9771 {
9772 case 1:
9773 return DW_FORM_block1;
9774 case 2:
9775 return DW_FORM_block2;
9776 case 4:
9777 return DW_FORM_block4;
9778 default:
9779 gcc_unreachable ();
9780 }
9781 case dw_val_class_const:
9782 return DW_FORM_sdata;
9783 case dw_val_class_unsigned_const:
9784 switch (constant_size (AT_unsigned (a)))
9785 {
9786 case 1:
9787 return DW_FORM_data1;
9788 case 2:
9789 return DW_FORM_data2;
9790 case 4:
9791 /* In DWARF3 DW_AT_data_member_location with
9792 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9793 constant, so we need to use DW_FORM_udata if we need
9794 a large constant. */
9795 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9796 return DW_FORM_udata;
9797 return DW_FORM_data4;
9798 case 8:
9799 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9800 return DW_FORM_udata;
9801 return DW_FORM_data8;
9802 default:
9803 gcc_unreachable ();
9804 }
9805 case dw_val_class_const_implicit:
9806 case dw_val_class_unsigned_const_implicit:
9807 case dw_val_class_file_implicit:
9808 return DW_FORM_implicit_const;
9809 case dw_val_class_const_double:
9810 switch (HOST_BITS_PER_WIDE_INT)
9811 {
9812 case 8:
9813 return DW_FORM_data2;
9814 case 16:
9815 return DW_FORM_data4;
9816 case 32:
9817 return DW_FORM_data8;
9818 case 64:
9819 if (dwarf_version >= 5)
9820 return DW_FORM_data16;
9821 /* FALLTHRU */
9822 default:
9823 return DW_FORM_block1;
9824 }
9825 case dw_val_class_wide_int:
9826 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9827 {
9828 case 8:
9829 return DW_FORM_data1;
9830 case 16:
9831 return DW_FORM_data2;
9832 case 32:
9833 return DW_FORM_data4;
9834 case 64:
9835 return DW_FORM_data8;
9836 case 128:
9837 if (dwarf_version >= 5)
9838 return DW_FORM_data16;
9839 /* FALLTHRU */
9840 default:
9841 return DW_FORM_block1;
9842 }
9843 case dw_val_class_symview:
9844 /* ??? We might use uleb128, but then we'd have to compute
9845 .debug_info offsets in the assembler. */
9846 if (symview_upper_bound <= 0xff)
9847 return DW_FORM_data1;
9848 else if (symview_upper_bound <= 0xffff)
9849 return DW_FORM_data2;
9850 else if (symview_upper_bound <= 0xffffffff)
9851 return DW_FORM_data4;
9852 else
9853 return DW_FORM_data8;
9854 case dw_val_class_vec:
9855 switch (constant_size (a->dw_attr_val.v.val_vec.length
9856 * a->dw_attr_val.v.val_vec.elt_size))
9857 {
9858 case 1:
9859 return DW_FORM_block1;
9860 case 2:
9861 return DW_FORM_block2;
9862 case 4:
9863 return DW_FORM_block4;
9864 default:
9865 gcc_unreachable ();
9866 }
9867 case dw_val_class_flag:
9868 if (dwarf_version >= 4)
9869 {
9870 /* Currently all add_AT_flag calls pass in 1 as last argument,
9871 so DW_FORM_flag_present can be used. If that ever changes,
9872 we'll need to use DW_FORM_flag and have some optimization
9873 in build_abbrev_table that will change those to
9874 DW_FORM_flag_present if it is set to 1 in all DIEs using
9875 the same abbrev entry. */
9876 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9877 return DW_FORM_flag_present;
9878 }
9879 return DW_FORM_flag;
9880 case dw_val_class_die_ref:
9881 if (AT_ref_external (a))
9882 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9883 else
9884 return DW_FORM_ref;
9885 case dw_val_class_fde_ref:
9886 return DW_FORM_data;
9887 case dw_val_class_lbl_id:
9888 return (AT_index (a) == NOT_INDEXED
9889 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9890 case dw_val_class_lineptr:
9891 case dw_val_class_macptr:
9892 case dw_val_class_loclistsptr:
9893 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9894 case dw_val_class_str:
9895 return AT_string_form (a);
9896 case dw_val_class_file:
9897 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9898 {
9899 case 1:
9900 return DW_FORM_data1;
9901 case 2:
9902 return DW_FORM_data2;
9903 case 4:
9904 return DW_FORM_data4;
9905 default:
9906 gcc_unreachable ();
9907 }
9908
9909 case dw_val_class_data8:
9910 return DW_FORM_data8;
9911
9912 case dw_val_class_high_pc:
9913 switch (DWARF2_ADDR_SIZE)
9914 {
9915 case 1:
9916 return DW_FORM_data1;
9917 case 2:
9918 return DW_FORM_data2;
9919 case 4:
9920 return DW_FORM_data4;
9921 case 8:
9922 return DW_FORM_data8;
9923 default:
9924 gcc_unreachable ();
9925 }
9926
9927 case dw_val_class_discr_value:
9928 return (a->dw_attr_val.v.val_discr_value.pos
9929 ? DW_FORM_udata
9930 : DW_FORM_sdata);
9931 case dw_val_class_discr_list:
9932 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9933 {
9934 case 1:
9935 return DW_FORM_block1;
9936 case 2:
9937 return DW_FORM_block2;
9938 case 4:
9939 return DW_FORM_block4;
9940 default:
9941 gcc_unreachable ();
9942 }
9943
9944 default:
9945 gcc_unreachable ();
9946 }
9947 }
9948
9949 /* Output the encoding of an attribute value. */
9950
9951 static void
9952 output_value_format (dw_attr_node *a)
9953 {
9954 enum dwarf_form form = value_format (a);
9955
9956 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9957 }
9958
9959 /* Given a die and id, produce the appropriate abbreviations. */
9960
9961 static void
9962 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9963 {
9964 unsigned ix;
9965 dw_attr_node *a_attr;
9966
9967 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9968 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9969 dwarf_tag_name (abbrev->die_tag));
9970
9971 if (abbrev->die_child != NULL)
9972 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9973 else
9974 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9975
9976 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9977 {
9978 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9979 dwarf_attr_name (a_attr->dw_attr));
9980 output_value_format (a_attr);
9981 if (value_format (a_attr) == DW_FORM_implicit_const)
9982 {
9983 if (AT_class (a_attr) == dw_val_class_file_implicit)
9984 {
9985 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9986 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9987 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9988 }
9989 else
9990 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9991 }
9992 }
9993
9994 dw2_asm_output_data (1, 0, NULL);
9995 dw2_asm_output_data (1, 0, NULL);
9996 }
9997
9998
9999 /* Output the .debug_abbrev section which defines the DIE abbreviation
10000 table. */
10001
10002 static void
10003 output_abbrev_section (void)
10004 {
10005 unsigned int abbrev_id;
10006 dw_die_ref abbrev;
10007
10008 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10009 if (abbrev_id != 0)
10010 output_die_abbrevs (abbrev_id, abbrev);
10011
10012 /* Terminate the table. */
10013 dw2_asm_output_data (1, 0, NULL);
10014 }
10015
10016 /* Return a new location list, given the begin and end range, and the
10017 expression. */
10018
10019 static inline dw_loc_list_ref
10020 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10021 const char *end, var_loc_view vend,
10022 const char *section)
10023 {
10024 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10025
10026 retlist->begin = begin;
10027 retlist->begin_entry = NULL;
10028 retlist->end = end;
10029 retlist->expr = expr;
10030 retlist->section = section;
10031 retlist->vbegin = vbegin;
10032 retlist->vend = vend;
10033
10034 return retlist;
10035 }
10036
10037 /* Return true iff there's any nonzero view number in the loc list.
10038
10039 ??? When views are not enabled, we'll often extend a single range
10040 to the entire function, so that we emit a single location
10041 expression rather than a location list. With views, even with a
10042 single range, we'll output a list if start or end have a nonzero
10043 view. If we change this, we may want to stop splitting a single
10044 range in dw_loc_list just because of a nonzero view, even if it
10045 straddles across hot/cold partitions. */
10046
10047 static bool
10048 loc_list_has_views (dw_loc_list_ref list)
10049 {
10050 if (!debug_variable_location_views)
10051 return false;
10052
10053 for (dw_loc_list_ref loc = list;
10054 loc != NULL; loc = loc->dw_loc_next)
10055 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10056 return true;
10057
10058 return false;
10059 }
10060
10061 /* Generate a new internal symbol for this location list node, if it
10062 hasn't got one yet. */
10063
10064 static inline void
10065 gen_llsym (dw_loc_list_ref list)
10066 {
10067 gcc_assert (!list->ll_symbol);
10068 list->ll_symbol = gen_internal_sym ("LLST");
10069
10070 if (!loc_list_has_views (list))
10071 return;
10072
10073 if (dwarf2out_locviews_in_attribute ())
10074 {
10075 /* Use the same label_num for the view list. */
10076 label_num--;
10077 list->vl_symbol = gen_internal_sym ("LVUS");
10078 }
10079 else
10080 list->vl_symbol = list->ll_symbol;
10081 }
10082
10083 /* Generate a symbol for the list, but only if we really want to emit
10084 it as a list. */
10085
10086 static inline void
10087 maybe_gen_llsym (dw_loc_list_ref list)
10088 {
10089 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10090 return;
10091
10092 gen_llsym (list);
10093 }
10094
10095 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10096 NULL, don't consider size of the location expression. If we're not
10097 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10098 representation in *SIZEP. */
10099
10100 static bool
10101 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10102 {
10103 /* Don't output an entry that starts and ends at the same address. */
10104 if (strcmp (curr->begin, curr->end) == 0
10105 && curr->vbegin == curr->vend && !curr->force)
10106 return true;
10107
10108 if (!sizep)
10109 return false;
10110
10111 unsigned long size = size_of_locs (curr->expr);
10112
10113 /* If the expression is too large, drop it on the floor. We could
10114 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10115 in the expression, but >= 64KB expressions for a single value
10116 in a single range are unlikely to be very useful. */
10117 if (dwarf_version < 5 && size > 0xffff)
10118 return true;
10119
10120 *sizep = size;
10121
10122 return false;
10123 }
10124
10125 /* Output a view pair loclist entry for CURR, if it requires one. */
10126
10127 static void
10128 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10129 {
10130 if (!dwarf2out_locviews_in_loclist ())
10131 return;
10132
10133 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10134 return;
10135
10136 #ifdef DW_LLE_view_pair
10137 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10138
10139 if (dwarf2out_as_locview_support)
10140 {
10141 if (ZERO_VIEW_P (curr->vbegin))
10142 dw2_asm_output_data_uleb128 (0, "Location view begin");
10143 else
10144 {
10145 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10146 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10147 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10148 }
10149
10150 if (ZERO_VIEW_P (curr->vend))
10151 dw2_asm_output_data_uleb128 (0, "Location view end");
10152 else
10153 {
10154 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10155 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10156 dw2_asm_output_symname_uleb128 (label, "Location view end");
10157 }
10158 }
10159 else
10160 {
10161 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10162 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10163 }
10164 #endif /* DW_LLE_view_pair */
10165
10166 return;
10167 }
10168
10169 /* Output the location list given to us. */
10170
10171 static void
10172 output_loc_list (dw_loc_list_ref list_head)
10173 {
10174 int vcount = 0, lcount = 0;
10175
10176 if (list_head->emitted)
10177 return;
10178 list_head->emitted = true;
10179
10180 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10181 {
10182 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10183
10184 for (dw_loc_list_ref curr = list_head; curr != NULL;
10185 curr = curr->dw_loc_next)
10186 {
10187 unsigned long size;
10188
10189 if (skip_loc_list_entry (curr, &size))
10190 continue;
10191
10192 vcount++;
10193
10194 /* ?? dwarf_split_debug_info? */
10195 if (dwarf2out_as_locview_support)
10196 {
10197 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10198
10199 if (!ZERO_VIEW_P (curr->vbegin))
10200 {
10201 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10202 dw2_asm_output_symname_uleb128 (label,
10203 "View list begin (%s)",
10204 list_head->vl_symbol);
10205 }
10206 else
10207 dw2_asm_output_data_uleb128 (0,
10208 "View list begin (%s)",
10209 list_head->vl_symbol);
10210
10211 if (!ZERO_VIEW_P (curr->vend))
10212 {
10213 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10214 dw2_asm_output_symname_uleb128 (label,
10215 "View list end (%s)",
10216 list_head->vl_symbol);
10217 }
10218 else
10219 dw2_asm_output_data_uleb128 (0,
10220 "View list end (%s)",
10221 list_head->vl_symbol);
10222 }
10223 else
10224 {
10225 dw2_asm_output_data_uleb128 (curr->vbegin,
10226 "View list begin (%s)",
10227 list_head->vl_symbol);
10228 dw2_asm_output_data_uleb128 (curr->vend,
10229 "View list end (%s)",
10230 list_head->vl_symbol);
10231 }
10232 }
10233 }
10234
10235 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10236
10237 const char *last_section = NULL;
10238 const char *base_label = NULL;
10239
10240 /* Walk the location list, and output each range + expression. */
10241 for (dw_loc_list_ref curr = list_head; curr != NULL;
10242 curr = curr->dw_loc_next)
10243 {
10244 unsigned long size;
10245
10246 /* Skip this entry? If we skip it here, we must skip it in the
10247 view list above as well. */
10248 if (skip_loc_list_entry (curr, &size))
10249 continue;
10250
10251 lcount++;
10252
10253 if (dwarf_version >= 5)
10254 {
10255 if (dwarf_split_debug_info)
10256 {
10257 dwarf2out_maybe_output_loclist_view_pair (curr);
10258 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10259 uleb128 index into .debug_addr and uleb128 length. */
10260 dw2_asm_output_data (1, DW_LLE_startx_length,
10261 "DW_LLE_startx_length (%s)",
10262 list_head->ll_symbol);
10263 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10264 "Location list range start index "
10265 "(%s)", curr->begin);
10266 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10267 For that case we probably need to emit DW_LLE_startx_endx,
10268 but we'd need 2 .debug_addr entries rather than just one. */
10269 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10270 "Location list length (%s)",
10271 list_head->ll_symbol);
10272 }
10273 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10274 {
10275 dwarf2out_maybe_output_loclist_view_pair (curr);
10276 /* If all code is in the .text section, the base address is
10277 already provided by the CU attributes. Use
10278 DW_LLE_offset_pair where both addresses are uleb128 encoded
10279 offsets against that base. */
10280 dw2_asm_output_data (1, DW_LLE_offset_pair,
10281 "DW_LLE_offset_pair (%s)",
10282 list_head->ll_symbol);
10283 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10284 "Location list begin address (%s)",
10285 list_head->ll_symbol);
10286 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10287 "Location list end address (%s)",
10288 list_head->ll_symbol);
10289 }
10290 else if (HAVE_AS_LEB128)
10291 {
10292 /* Otherwise, find out how many consecutive entries could share
10293 the same base entry. If just one, emit DW_LLE_start_length,
10294 otherwise emit DW_LLE_base_address for the base address
10295 followed by a series of DW_LLE_offset_pair. */
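/* E.g. three consecutive ranges within one text section become a single
   DW_LLE_base_address entry followed by three DW_LLE_offset_pair entries,
   instead of three absolute-address DW_LLE_start_length entries.  */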
10296 if (last_section == NULL || curr->section != last_section)
10297 {
10298 dw_loc_list_ref curr2;
10299 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10300 curr2 = curr2->dw_loc_next)
10301 {
10302 if (strcmp (curr2->begin, curr2->end) == 0
10303 && !curr2->force)
10304 continue;
10305 break;
10306 }
10307 if (curr2 == NULL || curr->section != curr2->section)
10308 last_section = NULL;
10309 else
10310 {
10311 last_section = curr->section;
10312 base_label = curr->begin;
10313 dw2_asm_output_data (1, DW_LLE_base_address,
10314 "DW_LLE_base_address (%s)",
10315 list_head->ll_symbol);
10316 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10317 "Base address (%s)",
10318 list_head->ll_symbol);
10319 }
10320 }
10321 /* Only one entry with the same base address. Use
10322 DW_LLE_start_length with absolute address and uleb128
10323 length. */
10324 if (last_section == NULL)
10325 {
10326 dwarf2out_maybe_output_loclist_view_pair (curr);
10327 dw2_asm_output_data (1, DW_LLE_start_length,
10328 "DW_LLE_start_length (%s)",
10329 list_head->ll_symbol);
10330 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10331 "Location list begin address (%s)",
10332 list_head->ll_symbol);
10333 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10334 "Location list length "
10335 "(%s)", list_head->ll_symbol);
10336 }
10337 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10338 DW_LLE_base_address. */
10339 else
10340 {
10341 dwarf2out_maybe_output_loclist_view_pair (curr);
10342 dw2_asm_output_data (1, DW_LLE_offset_pair,
10343 "DW_LLE_offset_pair (%s)",
10344 list_head->ll_symbol);
10345 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10346 "Location list begin address "
10347 "(%s)", list_head->ll_symbol);
10348 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10349 "Location list end address "
10350 "(%s)", list_head->ll_symbol);
10351 }
10352 }
10353 /* The assembler does not support the .uleb128 directive. Emit
10354 DW_LLE_start_end with a pair of absolute addresses. */
10355 else
10356 {
10357 dwarf2out_maybe_output_loclist_view_pair (curr);
10358 dw2_asm_output_data (1, DW_LLE_start_end,
10359 "DW_LLE_start_end (%s)",
10360 list_head->ll_symbol);
10361 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10362 "Location list begin address (%s)",
10363 list_head->ll_symbol);
10364 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10365 "Location list end address (%s)",
10366 list_head->ll_symbol);
10367 }
10368 }
10369 else if (dwarf_split_debug_info)
10370 {
10371 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit an index into .debug_addr
10372 and a 4-byte length. */
10373 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10374 "Location list start/length entry (%s)",
10375 list_head->ll_symbol);
10376 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10377 "Location list range start index (%s)",
10378 curr->begin);
10379 /* The length field is 4 bytes. If we ever need to support
10380 an 8-byte length, we can add a new DW_LLE code or fall back
10381 to DW_LLE_GNU_start_end_entry. */
10382 dw2_asm_output_delta (4, curr->end, curr->begin,
10383 "Location list range length (%s)",
10384 list_head->ll_symbol);
10385 }
10386 else if (!have_multiple_function_sections)
10387 {
10388 /* Pair of relative addresses against start of text section. */
10389 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10390 "Location list begin address (%s)",
10391 list_head->ll_symbol);
10392 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10393 "Location list end address (%s)",
10394 list_head->ll_symbol);
10395 }
10396 else
10397 {
10398 /* Pair of absolute addresses. */
10399 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10400 "Location list begin address (%s)",
10401 list_head->ll_symbol);
10402 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10403 "Location list end address (%s)",
10404 list_head->ll_symbol);
10405 }
10406
10407 /* Output the block length for this list of location operations. */
10408 if (dwarf_version >= 5)
10409 dw2_asm_output_data_uleb128 (size, "Location expression size");
10410 else
10411 {
10412 gcc_assert (size <= 0xffff);
10413 dw2_asm_output_data (2, size, "Location expression size");
10414 }
10415
10416 output_loc_sequence (curr->expr, -1);
10417 }
10418
10419 /* And finally list termination. */
10420 if (dwarf_version >= 5)
10421 dw2_asm_output_data (1, DW_LLE_end_of_list,
10422 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10423 else if (dwarf_split_debug_info)
10424 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10425 "Location list terminator (%s)",
10426 list_head->ll_symbol);
10427 else
10428 {
10429 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10430 "Location list terminator begin (%s)",
10431 list_head->ll_symbol);
10432 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10433 "Location list terminator end (%s)",
10434 list_head->ll_symbol);
10435 }
10436
10437 gcc_assert (!list_head->vl_symbol
10438 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10439 }
10440
10441 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10442 section. Emit a relocated reference if val_entry is NULL, otherwise,
10443 emit an indirect reference. */
10444
10445 static void
10446 output_range_list_offset (dw_attr_node *a)
10447 {
10448 const char *name = dwarf_attr_name (a->dw_attr);
10449
10450 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10451 {
10452 if (dwarf_version >= 5)
10453 {
10454 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10455 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10456 debug_ranges_section, "%s", name);
10457 }
10458 else
10459 {
10460 char *p = strchr (ranges_section_label, '\0');
10461 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10462 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10463 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10464 debug_ranges_section, "%s", name);
10465 *p = '\0';
10466 }
10467 }
10468 else if (dwarf_version >= 5)
10469 {
10470 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10471 gcc_assert (rnglist_idx);
10472 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10473 }
10474 else
10475 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10476 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10477 "%s (offset from %s)", name, ranges_section_label);
10478 }
10479
10480 /* Output the offset into the debug_loc section. */
10481
10482 static void
10483 output_loc_list_offset (dw_attr_node *a)
10484 {
10485 char *sym = AT_loc_list (a)->ll_symbol;
10486
10487 gcc_assert (sym);
10488 if (!dwarf_split_debug_info)
10489 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10490 "%s", dwarf_attr_name (a->dw_attr));
10491 else if (dwarf_version >= 5)
10492 {
10493 gcc_assert (AT_loc_list (a)->num_assigned);
10494 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10495 dwarf_attr_name (a->dw_attr),
10496 sym);
10497 }
10498 else
10499 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10500 "%s", dwarf_attr_name (a->dw_attr));
10501 }
10502
10503 /* Output the view list offset into the debug_loc section. */
10504
10505 static void
10506 output_view_list_offset (dw_attr_node *a)
10507 {
10508 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10509
10510 gcc_assert (sym);
10511 if (dwarf_split_debug_info)
10512 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10513 "%s", dwarf_attr_name (a->dw_attr));
10514 else
10515 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10516 "%s", dwarf_attr_name (a->dw_attr));
10517 }
10518
10519 /* Output an attribute's index or value appropriately. */
10520
10521 static void
10522 output_attr_index_or_value (dw_attr_node *a)
10523 {
10524 const char *name = dwarf_attr_name (a->dw_attr);
10525
10526 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10527 {
10528 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10529 return;
10530 }
10531 switch (AT_class (a))
10532 {
10533 case dw_val_class_addr:
10534 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10535 break;
10536 case dw_val_class_high_pc:
10537 case dw_val_class_lbl_id:
10538 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10539 break;
10540 default:
10541 gcc_unreachable ();
10542 }
10543 }
10544
10545 /* Output a type signature. */
10546
10547 static inline void
10548 output_signature (const char *sig, const char *name)
10549 {
10550 int i;
10551
10552 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10553 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10554 }
10555
10556 /* Output a discriminant value. */
10557
10558 static inline void
10559 output_discr_value (dw_discr_value *discr_value, const char *name)
10560 {
10561 if (discr_value->pos)
10562 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10563 else
10564 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10565 }
10566
10567 /* Output the DIE and its attributes. Called recursively to generate
10568 the definitions of each child DIE. */
10569
10570 static void
10571 output_die (dw_die_ref die)
10572 {
10573 dw_attr_node *a;
10574 dw_die_ref c;
10575 unsigned long size;
10576 unsigned ix;
10577
10578 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10579 (unsigned long)die->die_offset,
10580 dwarf_tag_name (die->die_tag));
10581
10582 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10583 {
10584 const char *name = dwarf_attr_name (a->dw_attr);
10585
10586 switch (AT_class (a))
10587 {
10588 case dw_val_class_addr:
10589 output_attr_index_or_value (a);
10590 break;
10591
10592 case dw_val_class_offset:
10593 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10594 "%s", name);
10595 break;
10596
10597 case dw_val_class_range_list:
10598 output_range_list_offset (a);
10599 break;
10600
10601 case dw_val_class_loc:
10602 size = size_of_locs (AT_loc (a));
10603
10604 /* Output the block length for this list of location operations. */
10605 if (dwarf_version >= 4)
10606 dw2_asm_output_data_uleb128 (size, "%s", name);
10607 else
10608 dw2_asm_output_data (constant_size (size), size, "%s", name);
10609
10610 output_loc_sequence (AT_loc (a), -1);
10611 break;
10612
10613 case dw_val_class_const:
10614 /* ??? It would be slightly more efficient to use a scheme like the one
10615 used for unsigned constants below, but gdb 4.x does not sign
10616 extend. Gdb 5.x does sign extend. */
10617 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10618 break;
10619
10620 case dw_val_class_unsigned_const:
10621 {
10622 int csize = constant_size (AT_unsigned (a));
10623 if (dwarf_version == 3
10624 && a->dw_attr == DW_AT_data_member_location
10625 && csize >= 4)
10626 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10627 else
10628 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10629 }
10630 break;
10631
10632 case dw_val_class_symview:
10633 {
10634 int vsize;
10635 if (symview_upper_bound <= 0xff)
10636 vsize = 1;
10637 else if (symview_upper_bound <= 0xffff)
10638 vsize = 2;
10639 else if (symview_upper_bound <= 0xffffffff)
10640 vsize = 4;
10641 else
10642 vsize = 8;
10643 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10644 "%s", name);
10645 }
10646 break;
10647
10648 case dw_val_class_const_implicit:
10649 if (flag_debug_asm)
10650 fprintf (asm_out_file, "\t\t\t%s %s ("
10651 HOST_WIDE_INT_PRINT_DEC ")\n",
10652 ASM_COMMENT_START, name, AT_int (a));
10653 break;
10654
10655 case dw_val_class_unsigned_const_implicit:
10656 if (flag_debug_asm)
10657 fprintf (asm_out_file, "\t\t\t%s %s ("
10658 HOST_WIDE_INT_PRINT_HEX ")\n",
10659 ASM_COMMENT_START, name, AT_unsigned (a));
10660 break;
10661
10662 case dw_val_class_const_double:
10663 {
10664 unsigned HOST_WIDE_INT first, second;
10665
10666 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10667 dw2_asm_output_data (1,
10668 HOST_BITS_PER_DOUBLE_INT
10669 / HOST_BITS_PER_CHAR,
10670 NULL);
10671
10672 if (WORDS_BIG_ENDIAN)
10673 {
10674 first = a->dw_attr_val.v.val_double.high;
10675 second = a->dw_attr_val.v.val_double.low;
10676 }
10677 else
10678 {
10679 first = a->dw_attr_val.v.val_double.low;
10680 second = a->dw_attr_val.v.val_double.high;
10681 }
10682
10683 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10684 first, "%s", name);
10685 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10686 second, NULL);
10687 }
10688 break;
10689
10690 case dw_val_class_wide_int:
10691 {
10692 int i;
10693 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10694 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10695 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10696 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10697 * l, NULL);
10698
10699 if (WORDS_BIG_ENDIAN)
10700 for (i = len - 1; i >= 0; --i)
10701 {
10702 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10703 "%s", name);
10704 name = "";
10705 }
10706 else
10707 for (i = 0; i < len; ++i)
10708 {
10709 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10710 "%s", name);
10711 name = "";
10712 }
10713 }
10714 break;
10715
10716 case dw_val_class_vec:
10717 {
10718 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10719 unsigned int len = a->dw_attr_val.v.val_vec.length;
10720 unsigned int i;
10721 unsigned char *p;
10722
10723 dw2_asm_output_data (constant_size (len * elt_size),
10724 len * elt_size, "%s", name);
10725 if (elt_size > sizeof (HOST_WIDE_INT))
10726 {
10727 elt_size /= 2;
10728 len *= 2;
10729 }
10730 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10731 i < len;
10732 i++, p += elt_size)
10733 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10734 "fp or vector constant word %u", i);
10735 break;
10736 }
10737
10738 case dw_val_class_flag:
10739 if (dwarf_version >= 4)
10740 {
10741 /* Currently all add_AT_flag calls pass in 1 as last argument,
10742 so DW_FORM_flag_present can be used. If that ever changes,
10743 we'll need to use DW_FORM_flag and have some optimization
10744 in build_abbrev_table that will change those to
10745 DW_FORM_flag_present if it is set to 1 in all DIEs using
10746 the same abbrev entry. */
10747 gcc_assert (AT_flag (a) == 1);
10748 if (flag_debug_asm)
10749 fprintf (asm_out_file, "\t\t\t%s %s\n",
10750 ASM_COMMENT_START, name);
10751 break;
10752 }
10753 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10754 break;
10755
10756 case dw_val_class_loc_list:
10757 output_loc_list_offset (a);
10758 break;
10759
10760 case dw_val_class_view_list:
10761 output_view_list_offset (a);
10762 break;
10763
10764 case dw_val_class_die_ref:
10765 if (AT_ref_external (a))
10766 {
10767 if (AT_ref (a)->comdat_type_p)
10768 {
10769 comdat_type_node *type_node
10770 = AT_ref (a)->die_id.die_type_node;
10771
10772 gcc_assert (type_node);
10773 output_signature (type_node->signature, name);
10774 }
10775 else
10776 {
10777 const char *sym = AT_ref (a)->die_id.die_symbol;
10778 int size;
10779
10780 gcc_assert (sym);
10781 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10782 length, whereas in DWARF3 it's always sized as an
10783 offset. */
10784 if (dwarf_version == 2)
10785 size = DWARF2_ADDR_SIZE;
10786 else
10787 size = DWARF_OFFSET_SIZE;
10788 /* ??? We cannot unconditionally output die_offset if
10789 non-zero - others might create references to those
10790 DIEs via symbols.
10791 And we do not clear its DIE offset after outputting it
10792 		 (and the label refers to the actual DIE, not to the
10793 		 DWARF CU header, which is the case where using
10794 		 label + offset would be the correct thing to do).
10795 ??? This is the reason for the with_offset flag. */
10796 if (AT_ref (a)->with_offset)
10797 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10798 debug_info_section, "%s", name);
10799 else
10800 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10801 name);
10802 }
10803 }
10804 else
10805 {
10806 gcc_assert (AT_ref (a)->die_offset);
10807 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10808 "%s", name);
10809 }
10810 break;
10811
10812 case dw_val_class_fde_ref:
10813 {
10814 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10815
10816 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10817 a->dw_attr_val.v.val_fde_index * 2);
10818 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10819 "%s", name);
10820 }
10821 break;
10822
10823 case dw_val_class_vms_delta:
10824 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10825 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10826 AT_vms_delta2 (a), AT_vms_delta1 (a),
10827 "%s", name);
10828 #else
10829 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10830 AT_vms_delta2 (a), AT_vms_delta1 (a),
10831 "%s", name);
10832 #endif
10833 break;
10834
10835 case dw_val_class_lbl_id:
10836 output_attr_index_or_value (a);
10837 break;
10838
10839 case dw_val_class_lineptr:
10840 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10841 debug_line_section, "%s", name);
10842 break;
10843
10844 case dw_val_class_macptr:
10845 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10846 debug_macinfo_section, "%s", name);
10847 break;
10848
10849 case dw_val_class_loclistsptr:
10850 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10851 debug_loc_section, "%s", name);
10852 break;
10853
10854 case dw_val_class_str:
10855 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10856 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10857 a->dw_attr_val.v.val_str->label,
10858 debug_str_section,
10859 "%s: \"%s\"", name, AT_string (a));
10860 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10861 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10862 a->dw_attr_val.v.val_str->label,
10863 debug_line_str_section,
10864 "%s: \"%s\"", name, AT_string (a));
10865 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10866 dw2_asm_output_data_uleb128 (AT_index (a),
10867 "%s: \"%s\"", name, AT_string (a));
10868 else
10869 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10870 break;
10871
10872 case dw_val_class_file:
10873 {
10874 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10875
10876 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10877 a->dw_attr_val.v.val_file->filename);
10878 break;
10879 }
10880
10881 case dw_val_class_file_implicit:
10882 if (flag_debug_asm)
10883 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10884 ASM_COMMENT_START, name,
10885 maybe_emit_file (a->dw_attr_val.v.val_file),
10886 a->dw_attr_val.v.val_file->filename);
10887 break;
10888
10889 case dw_val_class_data8:
10890 {
10891 int i;
10892
10893 for (i = 0; i < 8; i++)
10894 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10895 i == 0 ? "%s" : NULL, name);
10896 break;
10897 }
10898
10899 case dw_val_class_high_pc:
10900 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10901 get_AT_low_pc (die), "DW_AT_high_pc");
10902 break;
10903
10904 case dw_val_class_discr_value:
10905 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10906 break;
10907
10908 case dw_val_class_discr_list:
10909 {
10910 dw_discr_list_ref list = AT_discr_list (a);
10911 const int size = size_of_discr_list (list);
10912
10913 /* This is a block, so output its length first. */
10914 dw2_asm_output_data (constant_size (size), size,
10915 "%s: block size", name);
10916
10917 for (; list != NULL; list = list->dw_discr_next)
10918 {
10919 /* One byte for the discriminant value descriptor, and then as
10920 many LEB128 numbers as required. */
10921 if (list->dw_discr_range)
10922 dw2_asm_output_data (1, DW_DSC_range,
10923 "%s: DW_DSC_range", name);
10924 else
10925 dw2_asm_output_data (1, DW_DSC_label,
10926 "%s: DW_DSC_label", name);
10927
10928 output_discr_value (&list->dw_discr_lower_bound, name);
10929 if (list->dw_discr_range)
10930 output_discr_value (&list->dw_discr_upper_bound, name);
10931 }
10932 break;
10933 }
10934
10935 default:
10936 gcc_unreachable ();
10937 }
10938 }
10939
10940 FOR_EACH_CHILD (die, c, output_die (c));
10941
10942 /* Add null byte to terminate sibling list. */
10943 if (die->die_child != NULL)
10944 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10945 (unsigned long) die->die_offset);
10946 }
10947
10948 /* Output the dwarf version number. */
10949
10950 static void
10951 output_dwarf_version ()
10952 {
10953 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10954 views in loclist. That will change eventually. */
10955 if (dwarf_version == 6)
10956 {
10957 static bool once;
10958 if (!once)
10959 {
10960 warning (0,
10961 "-gdwarf-6 is output as version 5 with incompatibilities");
10962 once = true;
10963 }
10964 dw2_asm_output_data (2, 5, "DWARF version number");
10965 }
10966 else
10967 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10968 }
10969
10970 /* Output the compilation unit that appears at the beginning of the
10971 .debug_info section, and precedes the DIE descriptions. */
10972
10973 static void
10974 output_compilation_unit_header (enum dwarf_unit_type ut)
10975 {
10976 if (!XCOFF_DEBUGGING_INFO)
10977 {
10978 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10979 dw2_asm_output_data (4, 0xffffffff,
10980 "Initial length escape value indicating 64-bit DWARF extension");
10981 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10982 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10983 "Length of Compilation Unit Info");
10984 }
10985
10986 output_dwarf_version ();
10987 if (dwarf_version >= 5)
10988 {
10989 const char *name;
10990 switch (ut)
10991 {
10992 case DW_UT_compile: name = "DW_UT_compile"; break;
10993 case DW_UT_type: name = "DW_UT_type"; break;
10994 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10995 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10996 default: gcc_unreachable ();
10997 }
10998 dw2_asm_output_data (1, ut, "%s", name);
10999 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11000 }
11001 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
11002 debug_abbrev_section,
11003 "Offset Into Abbrev. Section");
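  /* In DWARF 5 the unit type and address size were already emitted above,
     right after the version number; DWARF 2-4 place the address size after
     the abbreviation table offset instead.  */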
11004 if (dwarf_version < 5)
11005 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11006 }
11007
11008 /* Output the compilation unit DIE and its children. */
11009
11010 static void
11011 output_comp_unit (dw_die_ref die, int output_if_empty,
11012 const unsigned char *dwo_id)
11013 {
11014 const char *secname, *oldsym;
11015 char *tmp;
11016
11017   /* Unless we are outputting the main CU, we may throw away empty ones.  */
11018 if (!output_if_empty && die->die_child == NULL)
11019 return;
11020
11021 /* Even if there are no children of this DIE, we must output the information
11022 about the compilation unit. Otherwise, on an empty translation unit, we
11023      would generate a present, but empty, .debug_info section.  IRIX 6.5 `nm'
11024 will then complain when examining the file. First mark all the DIEs in
11025 this CU so we know which get local refs. */
11026 mark_dies (die);
11027
11028 external_ref_hash_type *extern_map = optimize_external_refs (die);
11029
11030 /* For now, optimize only the main CU, in order to optimize the rest
11031 we'd need to see all of them earlier. Leave the rest for post-linking
11032 tools like DWZ. */
11033 if (die == comp_unit_die ())
11034 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11035
11036 build_abbrev_table (die, extern_map);
11037
11038 optimize_abbrev_table ();
11039
11040 delete extern_map;
11041
11042 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11043 next_die_offset = (dwo_id
11044 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11045 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11046 calc_die_sizes (die);
11047
11048 oldsym = die->die_id.die_symbol;
11049 if (oldsym && die->comdat_type_p)
11050 {
11051 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11052
11053 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11054 secname = tmp;
11055 die->die_id.die_symbol = NULL;
11056 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11057 }
11058 else
11059 {
11060 switch_to_section (debug_info_section);
11061 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11062 info_section_emitted = true;
11063 }
11064
11065 /* For LTO cross unit DIE refs we want a symbol on the start of the
11066 debuginfo section, not on the CU DIE. */
11067 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11068 {
11069 /* ??? No way to get visibility assembled without a decl. */
11070 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11071 get_identifier (oldsym), char_type_node);
11072 TREE_PUBLIC (decl) = true;
11073 TREE_STATIC (decl) = true;
11074 DECL_ARTIFICIAL (decl) = true;
11075 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11076 DECL_VISIBILITY_SPECIFIED (decl) = true;
11077 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11078 #ifdef ASM_WEAKEN_LABEL
11079 /* We prefer a .weak because that handles duplicates from duplicate
11080 archive members in a graceful way. */
11081 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11082 #else
11083 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11084 #endif
11085 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11086 }
11087
11088 /* Output debugging information. */
11089 output_compilation_unit_header (dwo_id
11090 ? DW_UT_split_compile : DW_UT_compile);
11091 if (dwarf_version >= 5)
11092 {
11093 if (dwo_id != NULL)
11094 for (int i = 0; i < 8; i++)
11095 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11096 }
11097 output_die (die);
11098
11099 /* Leave the marks on the main CU, so we can check them in
11100 output_pubnames. */
11101 if (oldsym)
11102 {
11103 unmark_dies (die);
11104 die->die_id.die_symbol = oldsym;
11105 }
11106 }
11107
11108 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11109 and .debug_pubtypes. This is configured per-target, but can be
11110 overridden by the -gpubnames or -gno-pubnames options. */
11111
11112 static inline bool
11113 want_pubnames (void)
11114 {
11115 if (debug_info_level <= DINFO_LEVEL_TERSE)
11116 return false;
11117 if (debug_generate_pub_sections != -1)
11118 return debug_generate_pub_sections;
11119 return targetm.want_debug_pub_sections;
11120 }
11121
11122 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11123
11124 static void
11125 add_AT_pubnames (dw_die_ref die)
11126 {
11127 if (want_pubnames ())
11128 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11129 }
11130
11131 /* Add a string attribute value to a skeleton DIE. */
11132
11133 static inline void
11134 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11135 const char *str)
11136 {
11137 dw_attr_node attr;
11138 struct indirect_string_node *node;
11139
11140 if (! skeleton_debug_str_hash)
11141 skeleton_debug_str_hash
11142 = hash_table<indirect_string_hasher>::create_ggc (10);
11143
11144 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11145 find_string_form (node);
11146 if (node->form == dwarf_FORM (DW_FORM_strx))
11147 node->form = DW_FORM_strp;
11148
11149 attr.dw_attr = attr_kind;
11150 attr.dw_attr_val.val_class = dw_val_class_str;
11151 attr.dw_attr_val.val_entry = NULL;
11152 attr.dw_attr_val.v.val_str = node;
11153 add_dwarf_attr (die, &attr);
11154 }
11155
11156 /* Helper function to generate top-level dies for skeleton debug_info and
11157 debug_types. */
11158
11159 static void
11160 add_top_level_skeleton_die_attrs (dw_die_ref die)
11161 {
11162 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11163 const char *comp_dir = comp_dir_string ();
11164
11165 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11166 if (comp_dir != NULL)
11167 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11168 add_AT_pubnames (die);
11169 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11170 }
11171
11172 /* Output skeleton debug sections that point to the dwo file. */
11173
11174 static void
11175 output_skeleton_debug_sections (dw_die_ref comp_unit,
11176 const unsigned char *dwo_id)
11177 {
11178 /* These attributes will be found in the full debug_info section. */
11179 remove_AT (comp_unit, DW_AT_producer);
11180 remove_AT (comp_unit, DW_AT_language);
11181
11182 switch_to_section (debug_skeleton_info_section);
11183 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11184
11185   /* Produce the skeleton compilation-unit header.  This one differs enough
11186      from a normal CU header that it's better not to call
11187      output_compilation_unit_header.  */
11188 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11189 dw2_asm_output_data (4, 0xffffffff,
11190 "Initial length escape value indicating 64-bit "
11191 "DWARF extension");
11192
11193 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11194 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11195 - DWARF_INITIAL_LENGTH_SIZE
11196 + size_of_die (comp_unit),
11197 "Length of Compilation Unit Info");
11198 output_dwarf_version ();
11199 if (dwarf_version >= 5)
11200 {
11201 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11202 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11203 }
11204 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11205 debug_skeleton_abbrev_section,
11206 "Offset Into Abbrev. Section");
11207 if (dwarf_version < 5)
11208 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11209 else
11210 for (int i = 0; i < 8; i++)
11211 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11212
11213 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11214 output_die (comp_unit);
11215
11216 /* Build the skeleton debug_abbrev section. */
11217 switch_to_section (debug_skeleton_abbrev_section);
11218 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11219
11220 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11221
11222 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11223 }
11224
11225 /* Output a comdat type unit DIE and its children. */
11226
11227 static void
11228 output_comdat_type_unit (comdat_type_node *node)
11229 {
11230 const char *secname;
11231 char *tmp;
11232 int i;
11233 #if defined (OBJECT_FORMAT_ELF)
11234 tree comdat_key;
11235 #endif
11236
11237 /* First mark all the DIEs in this CU so we know which get local refs. */
11238 mark_dies (node->root_die);
11239
11240 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11241
11242 build_abbrev_table (node->root_die, extern_map);
11243
11244 delete extern_map;
11245 extern_map = NULL;
11246
11247 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11248 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11249 calc_die_sizes (node->root_die);
11250
11251 #if defined (OBJECT_FORMAT_ELF)
11252 if (dwarf_version >= 5)
11253 {
11254 if (!dwarf_split_debug_info)
11255 secname = ".debug_info";
11256 else
11257 secname = ".debug_info.dwo";
11258 }
11259 else if (!dwarf_split_debug_info)
11260 secname = ".debug_types";
11261 else
11262 secname = ".debug_types.dwo";
11263
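  /* The comdat group key is the type signature rendered in hex, prefixed
     with "wi." when type units go into .debug_info (DWARF 5 and later) or
     "wt." for the older .debug_types sections.  */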
11264 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11265 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11266 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11267 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11268 comdat_key = get_identifier (tmp);
11269 targetm.asm_out.named_section (secname,
11270 SECTION_DEBUG | SECTION_LINKONCE,
11271 comdat_key);
11272 #else
11273 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11274 sprintf (tmp, (dwarf_version >= 5
11275 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11276 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11277 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11278 secname = tmp;
11279 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11280 #endif
11281
11282 /* Output debugging information. */
11283 output_compilation_unit_header (dwarf_split_debug_info
11284 ? DW_UT_split_type : DW_UT_type);
11285 output_signature (node->signature, "Type Signature");
11286 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11287 "Offset to Type DIE");
11288 output_die (node->root_die);
11289
11290 unmark_dies (node->root_die);
11291 }
11292
11293 /* Return the DWARF2/3 pubname associated with a decl. */
11294
11295 static const char *
11296 dwarf2_name (tree decl, int scope)
11297 {
11298 if (DECL_NAMELESS (decl))
11299 return NULL;
11300 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11301 }
11302
11303 /* Add a new entry to .debug_pubnames if appropriate. */
11304
11305 static void
11306 add_pubname_string (const char *str, dw_die_ref die)
11307 {
11308 pubname_entry e;
11309
11310 e.die = die;
11311 e.name = xstrdup (str);
11312 vec_safe_push (pubname_table, e);
11313 }
11314
11315 static void
11316 add_pubname (tree decl, dw_die_ref die)
11317 {
11318 if (!want_pubnames ())
11319 return;
11320
11321 /* Don't add items to the table when we expect that the consumer will have
11322 just read the enclosing die. For example, if the consumer is looking at a
11323 class_member, it will either be inside the class already, or will have just
11324 looked up the class to find the member. Either way, searching the class is
11325 faster than searching the index. */
11326 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11327 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11328 {
11329 const char *name = dwarf2_name (decl, 1);
11330
11331 if (name)
11332 add_pubname_string (name, die);
11333 }
11334 }
11335
11336 /* Add an enumerator to the pubnames section. */
11337
11338 static void
11339 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11340 {
11341 pubname_entry e;
11342
11343 gcc_assert (scope_name);
11344 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11345 e.die = die;
11346 vec_safe_push (pubname_table, e);
11347 }
11348
11349 /* Add a new entry to .debug_pubtypes if appropriate. */
11350
11351 static void
11352 add_pubtype (tree decl, dw_die_ref die)
11353 {
11354 pubname_entry e;
11355
11356 if (!want_pubnames ())
11357 return;
11358
11359 if ((TREE_PUBLIC (decl)
11360 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11361 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11362 {
11363 tree scope = NULL;
11364 const char *scope_name = "";
11365 const char *sep = is_cxx () ? "::" : ".";
11366 const char *name;
11367
11368 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11369 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11370 {
11371 scope_name = lang_hooks.dwarf_name (scope, 1);
11372 if (scope_name != NULL && scope_name[0] != '\0')
11373 scope_name = concat (scope_name, sep, NULL);
11374 else
11375 scope_name = "";
11376 }
11377
11378 if (TYPE_P (decl))
11379 name = type_tag (decl);
11380 else
11381 name = lang_hooks.dwarf_name (decl, 1);
11382
11383 /* If we don't have a name for the type, there's no point in adding
11384 it to the table. */
11385 if (name != NULL && name[0] != '\0')
11386 {
11387 e.die = die;
11388 e.name = concat (scope_name, name, NULL);
11389 vec_safe_push (pubtype_table, e);
11390 }
11391
11392 /* Although it might be more consistent to add the pubinfo for the
11393 enumerators as their dies are created, they should only be added if the
11394 enum type meets the criteria above. So rather than re-check the parent
11395 enum type whenever an enumerator die is created, just output them all
11396 here. This isn't protected by the name conditional because anonymous
11397 enums don't have names. */
11398 if (die->die_tag == DW_TAG_enumeration_type)
11399 {
11400 dw_die_ref c;
11401
11402 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11403 }
11404 }
11405 }
11406
11407 /* Output a single entry in the pubnames table. */
11408
11409 static void
11410 output_pubname (dw_offset die_offset, pubname_entry *entry)
11411 {
11412 dw_die_ref die = entry->die;
11413 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11414
11415 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11416
11417 if (debug_generate_pub_sections == 2)
11418 {
11419 /* This logic follows gdb's method for determining the value of the flag
11420 byte. */
11421 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11422 switch (die->die_tag)
11423 {
11424 case DW_TAG_typedef:
11425 case DW_TAG_base_type:
11426 case DW_TAG_subrange_type:
11427 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11428 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11429 break;
11430 case DW_TAG_enumerator:
11431 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11432 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11433 if (!is_cxx ())
11434 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11435 break;
11436 case DW_TAG_subprogram:
11437 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11438 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11439 if (!is_ada ())
11440 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11441 break;
11442 case DW_TAG_constant:
11443 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11444 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11445 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11446 break;
11447 case DW_TAG_variable:
11448 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11449 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11450 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11451 break;
11452 case DW_TAG_namespace:
11453 case DW_TAG_imported_declaration:
11454 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11455 break;
11456 case DW_TAG_class_type:
11457 case DW_TAG_interface_type:
11458 case DW_TAG_structure_type:
11459 case DW_TAG_union_type:
11460 case DW_TAG_enumeration_type:
11461 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11462 if (!is_cxx ())
11463 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11464 break;
11465 default:
11466 /* An unusual tag. Leave the flag-byte empty. */
11467 break;
11468 }
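      /* Emit just the kind and staticness bits; the low GDB_INDEX_CU_BITSIZE
	 bits of FLAGS are reserved for the CU index, which is not known
	 here.  */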
11469 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11470 "GDB-index flags");
11471 }
11472
11473 dw2_asm_output_nstring (entry->name, -1, "external name");
11474 }
11475
11476
11477 /* Output the public names table used to speed up access to externally
11478 visible names; or the public types table used to find type definitions. */
11479
11480 static void
11481 output_pubnames (vec<pubname_entry, va_gc> *names)
11482 {
11483 unsigned i;
11484 unsigned long pubnames_length = size_of_pubnames (names);
11485 pubname_entry *pub;
11486
11487 if (!XCOFF_DEBUGGING_INFO)
11488 {
11489 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11490 dw2_asm_output_data (4, 0xffffffff,
11491 "Initial length escape value indicating 64-bit DWARF extension");
11492 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11493 "Pub Info Length");
11494 }
11495
11496 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11497 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11498
11499 if (dwarf_split_debug_info)
11500 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11501 debug_skeleton_info_section,
11502 "Offset of Compilation Unit Info");
11503 else
11504 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11505 debug_info_section,
11506 "Offset of Compilation Unit Info");
11507 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11508 "Compilation Unit Length");
11509
11510 FOR_EACH_VEC_ELT (*names, i, pub)
11511 {
11512 if (include_pubname_in_output (names, pub))
11513 {
11514 dw_offset die_offset = pub->die->die_offset;
11515
11516 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11517 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11518 gcc_assert (pub->die->die_mark);
11519
11520 /* If we're putting types in their own .debug_types sections,
11521 the .debug_pubtypes table will still point to the compile
11522 unit (not the type unit), so we want to use the offset of
11523 the skeleton DIE (if there is one). */
11524 if (pub->die->comdat_type_p && names == pubtype_table)
11525 {
11526 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11527
11528 if (type_node != NULL)
11529 die_offset = (type_node->skeleton_die != NULL
11530 ? type_node->skeleton_die->die_offset
11531 : comp_unit_die ()->die_offset);
11532 }
11533
11534 output_pubname (die_offset, pub);
11535 }
11536 }
11537
11538 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11539 }
11540
11541 /* Output public names and types tables if necessary. */
11542
11543 static void
11544 output_pubtables (void)
11545 {
11546 if (!want_pubnames () || !info_section_emitted)
11547 return;
11548
11549 switch_to_section (debug_pubnames_section);
11550 output_pubnames (pubname_table);
11551 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11552 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11553 simply won't look for the section. */
11554 switch_to_section (debug_pubtypes_section);
11555 output_pubnames (pubtype_table);
11556 }
11557
11558
11559 /* Output the information that goes into the .debug_aranges table.
11560 Namely, define the beginning and ending address range of the
11561 text section generated for this compilation unit. */
11562
11563 static void
11564 output_aranges (void)
11565 {
11566 unsigned i;
11567 unsigned long aranges_length = size_of_aranges ();
11568
11569 if (!XCOFF_DEBUGGING_INFO)
11570 {
11571 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11572 dw2_asm_output_data (4, 0xffffffff,
11573 "Initial length escape value indicating 64-bit DWARF extension");
11574 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11575 "Length of Address Ranges Info");
11576 }
11577
11578 /* Version number for aranges is still 2, even up to DWARF5. */
11579 dw2_asm_output_data (2, 2, "DWARF aranges version");
11580 if (dwarf_split_debug_info)
11581 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11582 debug_skeleton_info_section,
11583 "Offset of Compilation Unit Info");
11584 else
11585 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11586 debug_info_section,
11587 "Offset of Compilation Unit Info");
11588 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11589 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11590
11591 /* We need to align to twice the pointer size here. */
11592 if (DWARF_ARANGES_PAD_SIZE)
11593 {
11594       /* Pad using 2-byte words so that the padding is correct for any
11595 	 pointer size.  */
11596 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11597 2 * DWARF2_ADDR_SIZE);
11598 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11599 dw2_asm_output_data (2, 0, NULL);
11600 }
11601
11602 /* It is necessary not to output these entries if the sections were
11603 not used; if the sections were not used, the length will be 0 and
11604 the address may end up as 0 if the section is discarded by ld
11605 --gc-sections, leaving an invalid (0, 0) entry that can be
11606 confused with the terminator. */
11607 if (text_section_used)
11608 {
11609 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11610 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11611 text_section_label, "Length");
11612 }
11613 if (cold_text_section_used)
11614 {
11615 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11616 "Address");
11617 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11618 cold_text_section_label, "Length");
11619 }
11620
11621 if (have_multiple_function_sections)
11622 {
11623 unsigned fde_idx;
11624 dw_fde_ref fde;
11625
11626 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11627 {
11628 if (DECL_IGNORED_P (fde->decl))
11629 continue;
11630 if (!fde->in_std_section)
11631 {
11632 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11633 "Address");
11634 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11635 fde->dw_fde_begin, "Length");
11636 }
11637 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11638 {
11639 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11640 "Address");
11641 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11642 fde->dw_fde_second_begin, "Length");
11643 }
11644 }
11645 }
11646
11647 /* Output the terminator words. */
11648 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11649 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11650 }
11651
11652 /* Add a new entry to .debug_ranges. Return its index into
11653    the ranges_table vector.  */
11654
11655 static unsigned int
11656 add_ranges_num (int num, bool maybe_new_sec)
11657 {
11658 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11659 vec_safe_push (ranges_table, r);
11660 return vec_safe_length (ranges_table) - 1;
11661 }
11662
11663 /* Add a new entry to .debug_ranges corresponding to a block, or a
11664 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11665 this entry might be in a different section from previous range. */
11666
11667 static unsigned int
11668 add_ranges (const_tree block, bool maybe_new_sec)
11669 {
11670 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11671 }
11672
11673 /* Note that (*rnglist_table)[offset] is either a head of a rnglist
11674 chain, or middle entry of a chain that will be directly referred to. */
11675
11676 static void
11677 note_rnglist_head (unsigned int offset)
11678 {
11679 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11680 return;
11681 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11682 }
11683
11684 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11685 When using dwarf_split_debug_info, address attributes in dies destined
11686 for the final executable should be direct references--setting the
11687 parameter force_direct ensures this behavior. */
11688
11689 static void
11690 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11691 bool *added, bool force_direct)
11692 {
11693 unsigned int in_use = vec_safe_length (ranges_by_label);
11694 unsigned int offset;
11695 dw_ranges_by_label rbl = { begin, end };
11696 vec_safe_push (ranges_by_label, rbl);
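  /* Encode the ranges_by_label index as a negative number (-1 - index) so
     that output_ranges and output_rnglists can tell label pairs apart from
     positive block numbers and from the 0 terminator.  */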
11697 offset = add_ranges_num (-(int)in_use - 1, true);
11698 if (!*added)
11699 {
11700 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11701 *added = true;
11702 note_rnglist_head (offset);
11703 }
11704 }
11705
11706 /* Emit .debug_ranges section. */
11707
11708 static void
11709 output_ranges (void)
11710 {
11711 unsigned i;
11712 static const char *const start_fmt = "Offset %#x";
11713 const char *fmt = start_fmt;
11714 dw_ranges *r;
11715
11716 switch_to_section (debug_ranges_section);
11717 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11718 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11719 {
11720 int block_num = r->num;
11721
11722 if (block_num > 0)
11723 {
11724 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11725 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11726
11727 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11728 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11729
11730 /* If all code is in the text section, then the compilation
11731 unit base address defaults to DW_AT_low_pc, which is the
11732 base of the text section. */
11733 if (!have_multiple_function_sections)
11734 {
11735 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11736 text_section_label,
11737 fmt, i * 2 * DWARF2_ADDR_SIZE);
11738 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11739 text_section_label, NULL);
11740 }
11741
11742 /* Otherwise, the compilation unit base address is zero,
11743 which allows us to use absolute addresses, and not worry
11744 about whether the target supports cross-section
11745 arithmetic. */
11746 else
11747 {
11748 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11749 fmt, i * 2 * DWARF2_ADDR_SIZE);
11750 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11751 }
11752
11753 fmt = NULL;
11754 }
11755
11756 /* Negative block_num stands for an index into ranges_by_label. */
11757 else if (block_num < 0)
11758 {
11759 int lab_idx = - block_num - 1;
11760
11761 if (!have_multiple_function_sections)
11762 {
11763 gcc_unreachable ();
11764 #if 0
11765 /* If we ever use add_ranges_by_labels () for a single
11766 function section, all we have to do is to take out
11767 the #if 0 above. */
11768 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11769 (*ranges_by_label)[lab_idx].begin,
11770 text_section_label,
11771 fmt, i * 2 * DWARF2_ADDR_SIZE);
11772 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11773 (*ranges_by_label)[lab_idx].end,
11774 text_section_label, NULL);
11775 #endif
11776 }
11777 else
11778 {
11779 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11780 (*ranges_by_label)[lab_idx].begin,
11781 fmt, i * 2 * DWARF2_ADDR_SIZE);
11782 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11783 (*ranges_by_label)[lab_idx].end,
11784 NULL);
11785 }
11786 }
11787 else
11788 {
11789 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11790 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11791 fmt = start_fmt;
11792 }
11793 }
11794 }
11795
11796 /* Non-zero if .debug_line_str should be used for .debug_line section
11797 strings or strings that are likely shareable with those. */
11798 #define DWARF5_USE_DEBUG_LINE_STR \
11799 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11800 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11801 /* FIXME: there is no .debug_line_str.dwo section, \
11802 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11803 && !dwarf_split_debug_info)
11804
11805 /* Assign .debug_rnglists indexes. */
11806
11807 static void
11808 index_rnglists (void)
11809 {
11810 unsigned i;
11811 dw_ranges *r;
11812
11813 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11814 if (r->label)
11815 r->idx = rnglist_idx++;
11816 }
11817
11818 /* Emit .debug_rnglists section. */
11819
11820 static void
11821 output_rnglists (unsigned generation)
11822 {
11823 unsigned i;
11824 dw_ranges *r;
11825 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11826 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11827 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11828
11829 switch_to_section (debug_ranges_section);
11830 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11831 /* There are up to 4 unique ranges labels per generation.
11832 See also init_sections_and_labels. */
11833 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11834 2 + generation * 4);
11835 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11836 3 + generation * 4);
11837 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11838 dw2_asm_output_data (4, 0xffffffff,
11839 "Initial length escape value indicating "
11840 "64-bit DWARF extension");
11841 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11842 "Length of Range Lists");
11843 ASM_OUTPUT_LABEL (asm_out_file, l1);
11844 output_dwarf_version ();
11845 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11846 dw2_asm_output_data (1, 0, "Segment Size");
11847 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11848 about relocation sizes and primarily care about the size of .debug*
11849 sections in linked shared libraries and executables, then
11850 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11851 into it are usually larger than just DW_FORM_sec_offset offsets
11852 into the .debug_rnglists section. */
11853 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11854 "Offset Entry Count");
11855 if (dwarf_split_debug_info)
11856 {
11857 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11858 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11859 if (r->label)
11860 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11861 ranges_base_label, NULL);
11862 }
11863
11864 const char *lab = "";
11865 unsigned int len = vec_safe_length (ranges_table);
11866 const char *base = NULL;
11867 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11868 {
11869 int block_num = r->num;
11870
11871 if (r->label)
11872 {
11873 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11874 lab = r->label;
11875 }
11876 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11877 base = NULL;
11878 if (block_num > 0)
11879 {
11880 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11881 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11882
11883 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11884 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11885
11886 if (HAVE_AS_LEB128)
11887 {
11888 /* If all code is in the text section, then the compilation
11889 unit base address defaults to DW_AT_low_pc, which is the
11890 base of the text section. */
11891 if (!have_multiple_function_sections)
11892 {
11893 dw2_asm_output_data (1, DW_RLE_offset_pair,
11894 "DW_RLE_offset_pair (%s)", lab);
11895 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11896 "Range begin address (%s)", lab);
11897 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11898 "Range end address (%s)", lab);
11899 continue;
11900 }
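	      /* With multiple function sections, see whether the next entry
		 stays in the same section; if so, emit DW_RLE_base_address
		 once so the following entries can use the more compact
		 DW_RLE_offset_pair form instead of DW_RLE_start_length.  */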
11901 if (base == NULL)
11902 {
11903 dw_ranges *r2 = NULL;
11904 if (i < len - 1)
11905 r2 = &(*ranges_table)[i + 1];
11906 if (r2
11907 && r2->num != 0
11908 && r2->label == NULL
11909 && !r2->maybe_new_sec)
11910 {
11911 dw2_asm_output_data (1, DW_RLE_base_address,
11912 "DW_RLE_base_address (%s)", lab);
11913 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11914 "Base address (%s)", lab);
11915 strcpy (basebuf, blabel);
11916 base = basebuf;
11917 }
11918 }
11919 if (base)
11920 {
11921 dw2_asm_output_data (1, DW_RLE_offset_pair,
11922 "DW_RLE_offset_pair (%s)", lab);
11923 dw2_asm_output_delta_uleb128 (blabel, base,
11924 "Range begin address (%s)", lab);
11925 dw2_asm_output_delta_uleb128 (elabel, base,
11926 "Range end address (%s)", lab);
11927 continue;
11928 }
11929 dw2_asm_output_data (1, DW_RLE_start_length,
11930 "DW_RLE_start_length (%s)", lab);
11931 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11932 "Range begin address (%s)", lab);
11933 dw2_asm_output_delta_uleb128 (elabel, blabel,
11934 "Range length (%s)", lab);
11935 }
11936 else
11937 {
11938 dw2_asm_output_data (1, DW_RLE_start_end,
11939 "DW_RLE_start_end (%s)", lab);
11940 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11941 "Range begin address (%s)", lab);
11942 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11943 "Range end address (%s)", lab);
11944 }
11945 }
11946
11947 /* Negative block_num stands for an index into ranges_by_label. */
11948 else if (block_num < 0)
11949 {
11950 int lab_idx = - block_num - 1;
11951 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11952 const char *elabel = (*ranges_by_label)[lab_idx].end;
11953
11954 if (!have_multiple_function_sections)
11955 gcc_unreachable ();
11956 if (HAVE_AS_LEB128)
11957 {
11958 dw2_asm_output_data (1, DW_RLE_start_length,
11959 "DW_RLE_start_length (%s)", lab);
11960 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11961 "Range begin address (%s)", lab);
11962 dw2_asm_output_delta_uleb128 (elabel, blabel,
11963 "Range length (%s)", lab);
11964 }
11965 else
11966 {
11967 dw2_asm_output_data (1, DW_RLE_start_end,
11968 "DW_RLE_start_end (%s)", lab);
11969 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11970 "Range begin address (%s)", lab);
11971 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11972 "Range end address (%s)", lab);
11973 }
11974 }
11975 else
11976 dw2_asm_output_data (1, DW_RLE_end_of_list,
11977 "DW_RLE_end_of_list (%s)", lab);
11978 }
11979 ASM_OUTPUT_LABEL (asm_out_file, l2);
11980 }
11981
11982 /* Data structure containing information about input files. */
11983 struct file_info
11984 {
11985 const char *path; /* Complete file name. */
11986 const char *fname; /* File name part. */
11987 int length; /* Length of entire string. */
11988 struct dwarf_file_data * file_idx; /* Index in input file table. */
11989 int dir_idx; /* Index in directory table. */
11990 };
11991
11992 /* Data structure containing information about directories with source
11993 files. */
11994 struct dir_info
11995 {
11996 const char *path; /* Path including directory name. */
11997 int length; /* Path length. */
11998 int prefix; /* Index of directory entry which is a prefix. */
11999 int count; /* Number of files in this directory. */
12000 int dir_idx; /* Index of directory used as base. */
12001 };
12002
12003 /* Callback function for file_info comparison. We sort by looking at
12004 the directories in the path. */
12005
12006 static int
12007 file_info_cmp (const void *p1, const void *p2)
12008 {
12009 const struct file_info *const s1 = (const struct file_info *) p1;
12010 const struct file_info *const s2 = (const struct file_info *) p2;
12011 const unsigned char *cp1;
12012 const unsigned char *cp2;
12013
12014   /* Take care of file names without directories.  We need to make sure that
12015      we return consistent values to qsort since some implementations will get
12016      confused if we return the same value when identical operands are passed
12017      in opposite orders.  So if neither has a directory, return 0; otherwise
12018      return 1 or -1 depending on which one has the directory.  We want the one
12019      with the directory to sort after the one without, so all files without a
12020      directory are at the start (normally only the compilation unit file).  */
12021 if ((s1->path == s1->fname || s2->path == s2->fname))
12022 return (s2->path == s2->fname) - (s1->path == s1->fname);
12023
12024 cp1 = (const unsigned char *) s1->path;
12025 cp2 = (const unsigned char *) s2->path;
12026
12027 while (1)
12028 {
12029 ++cp1;
12030 ++cp2;
12031 /* Reached the end of the first path? If so, handle like above,
12032 but now we want longer directory prefixes before shorter ones. */
12033 if ((cp1 == (const unsigned char *) s1->fname)
12034 || (cp2 == (const unsigned char *) s2->fname))
12035 return ((cp1 == (const unsigned char *) s1->fname)
12036 - (cp2 == (const unsigned char *) s2->fname));
12037
12038 /* Character of current path component the same? */
12039 else if (*cp1 != *cp2)
12040 return *cp1 - *cp2;
12041 }
12042 }
12043
12044 struct file_name_acquire_data
12045 {
12046 struct file_info *files;
12047 int used_files;
12048 int max_files;
12049 };
12050
12051 /* Traversal function for the hash table. */
12052
12053 int
12054 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12055 {
12056 struct dwarf_file_data *d = *slot;
12057 struct file_info *fi;
12058 const char *f;
12059
12060 gcc_assert (fnad->max_files >= d->emitted_number);
12061
12062 if (! d->emitted_number)
12063 return 1;
12064
12065 gcc_assert (fnad->max_files != fnad->used_files);
12066
12067 fi = fnad->files + fnad->used_files++;
12068
12069 /* Skip all leading "./". */
12070 f = d->filename;
12071 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12072 f += 2;
12073
12074 /* Create a new array entry. */
12075 fi->path = f;
12076 fi->length = strlen (f);
12077 fi->file_idx = d;
12078
12079 /* Search for the file name part. */
12080 f = strrchr (f, DIR_SEPARATOR);
12081 #if defined (DIR_SEPARATOR_2)
12082 {
12083 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12084
12085 if (g != NULL)
12086 {
12087 if (f == NULL || f < g)
12088 f = g;
12089 }
12090 }
12091 #endif
12092
12093 fi->fname = f == NULL ? fi->path : f + 1;
12094 return 1;
12095 }
12096
12097 /* Helper function for output_file_names.  Emit the string STR using
12098    encoding FORM, with ENTRY_KIND and index IDX used in the assembly
12099    comment.  */
12100
12101 static void
12102 output_line_string (enum dwarf_form form, const char *str,
12103 const char *entry_kind, unsigned int idx)
12104 {
12105 switch (form)
12106 {
12107 case DW_FORM_string:
12108 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12109 break;
12110 case DW_FORM_line_strp:
12111 if (!debug_line_str_hash)
12112 debug_line_str_hash
12113 = hash_table<indirect_string_hasher>::create_ggc (10);
12114
12115 struct indirect_string_node *node;
12116 node = find_AT_string_in_table (str, debug_line_str_hash);
12117 set_indirect_string (node);
12118 node->form = form;
12119 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12120 debug_line_str_section, "%s: %#x: \"%s\"",
12121 entry_kind, 0, node->str);
12122 break;
12123 default:
12124 gcc_unreachable ();
12125 }
12126 }
12127
12128 /* Output the directory table and the file name table. We try to minimize
12129 the total amount of memory needed. A heuristic is used to avoid large
12130 slowdowns with many input files. */
12131
12132 static void
12133 output_file_names (void)
12134 {
12135 struct file_name_acquire_data fnad;
12136 int numfiles;
12137 struct file_info *files;
12138 struct dir_info *dirs;
12139 int *saved;
12140 int *savehere;
12141 int *backmap;
12142 int ndirs;
12143 int idx_offset;
12144 int i;
12145
12146 if (!last_emitted_file)
12147 {
12148 if (dwarf_version >= 5)
12149 {
12150 dw2_asm_output_data (1, 0, "Directory entry format count");
12151 dw2_asm_output_data_uleb128 (0, "Directories count");
12152 dw2_asm_output_data (1, 0, "File name entry format count");
12153 dw2_asm_output_data_uleb128 (0, "File names count");
12154 }
12155 else
12156 {
12157 dw2_asm_output_data (1, 0, "End directory table");
12158 dw2_asm_output_data (1, 0, "End file name table");
12159 }
12160 return;
12161 }
12162
12163 numfiles = last_emitted_file->emitted_number;
12164
12165 /* Allocate the various arrays we need. */
12166 files = XALLOCAVEC (struct file_info, numfiles);
12167 dirs = XALLOCAVEC (struct dir_info, numfiles);
12168
12169 fnad.files = files;
12170 fnad.used_files = 0;
12171 fnad.max_files = numfiles;
12172 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12173 gcc_assert (fnad.used_files == fnad.max_files);
12174
12175 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12176
12177 /* Find all the different directories used. */
12178 dirs[0].path = files[0].path;
12179 dirs[0].length = files[0].fname - files[0].path;
12180 dirs[0].prefix = -1;
12181 dirs[0].count = 1;
12182 dirs[0].dir_idx = 0;
12183 files[0].dir_idx = 0;
12184 ndirs = 1;
12185
12186 for (i = 1; i < numfiles; i++)
12187 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12188 && memcmp (dirs[ndirs - 1].path, files[i].path,
12189 dirs[ndirs - 1].length) == 0)
12190 {
12191 /* Same directory as last entry. */
12192 files[i].dir_idx = ndirs - 1;
12193 ++dirs[ndirs - 1].count;
12194 }
12195 else
12196 {
12197 int j;
12198
12199 /* This is a new directory. */
12200 dirs[ndirs].path = files[i].path;
12201 dirs[ndirs].length = files[i].fname - files[i].path;
12202 dirs[ndirs].count = 1;
12203 dirs[ndirs].dir_idx = ndirs;
12204 files[i].dir_idx = ndirs;
12205
12206 /* Search for a prefix. */
12207 dirs[ndirs].prefix = -1;
12208 for (j = 0; j < ndirs; j++)
12209 if (dirs[j].length < dirs[ndirs].length
12210 && dirs[j].length > 1
12211 && (dirs[ndirs].prefix == -1
12212 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12213 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12214 dirs[ndirs].prefix = j;
12215
12216 ++ndirs;
12217 }
12218
12219 /* Now to the actual work. We have to find a subset of the directories which
12220 allow expressing the file name using references to the directory table
12221 with the least amount of characters. We do not do an exhaustive search
12222 where we would have to check out every combination of every single
12223 possible prefix. Instead we use a heuristic which provides nearly optimal
12224 results in most cases and never is much off. */
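  /* For instance (hypothetical paths), if "/usr/include/" were used by ten
     files, listing that directory once in the directory table would let each
     of those file entries drop the common prefix, saving roughly ten times
     its length at the cost of a single directory entry.  */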
12225 saved = XALLOCAVEC (int, ndirs);
12226 savehere = XALLOCAVEC (int, ndirs);
12227
12228 memset (saved, '\0', ndirs * sizeof (saved[0]));
12229 for (i = 0; i < ndirs; i++)
12230 {
12231 int j;
12232 int total;
12233
12234 /* We can always save some space for the current directory. But this
12235 does not mean it will be enough to justify adding the directory. */
12236 savehere[i] = dirs[i].length;
12237 total = (savehere[i] - saved[i]) * dirs[i].count;
12238
12239 for (j = i + 1; j < ndirs; j++)
12240 {
12241 savehere[j] = 0;
12242 if (saved[j] < dirs[i].length)
12243 {
12244 /* Determine whether the dirs[i] path is a prefix of the
12245 dirs[j] path. */
12246 int k;
12247
12248 k = dirs[j].prefix;
12249 while (k != -1 && k != (int) i)
12250 k = dirs[k].prefix;
12251
12252 if (k == (int) i)
12253 {
12254 /* Yes it is. We can possibly save some memory by
12255 writing the filenames in dirs[j] relative to
12256 dirs[i]. */
12257 savehere[j] = dirs[i].length;
12258 total += (savehere[j] - saved[j]) * dirs[j].count;
12259 }
12260 }
12261 }
12262
12263 /* Check whether we can save enough to justify adding the dirs[i]
12264 directory. */
12265 if (total > dirs[i].length + 1)
12266 {
12267 /* It's worthwhile adding. */
12268 for (j = i; j < ndirs; j++)
12269 if (savehere[j] > 0)
12270 {
12271 /* Remember how much we saved for this directory so far. */
12272 saved[j] = savehere[j];
12273
12274 /* Remember the prefix directory. */
12275 dirs[j].dir_idx = i;
12276 }
12277 }
12278 }
12279
12280 /* Emit the directory name table. */
12281 idx_offset = dirs[0].length > 0 ? 1 : 0;
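  /* Directory index 0 is reserved for the compilation directory.  If the
     first sorted directory is the "no directory" entry (length 0), it is not
     emitted and its files simply use index 0; otherwise every directory is
     shifted up by IDX_OFFSET == 1.  */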
12282 enum dwarf_form str_form = DW_FORM_string;
12283 enum dwarf_form idx_form = DW_FORM_udata;
12284 if (dwarf_version >= 5)
12285 {
12286 const char *comp_dir = comp_dir_string ();
12287 if (comp_dir == NULL)
12288 comp_dir = "";
12289 dw2_asm_output_data (1, 1, "Directory entry format count");
12290 if (DWARF5_USE_DEBUG_LINE_STR)
12291 str_form = DW_FORM_line_strp;
12292 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12293 dw2_asm_output_data_uleb128 (str_form, "%s",
12294 get_DW_FORM_name (str_form));
12295 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12296 if (str_form == DW_FORM_string)
12297 {
12298 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12299 for (i = 1 - idx_offset; i < ndirs; i++)
12300 dw2_asm_output_nstring (dirs[i].path,
12301 dirs[i].length
12302 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12303 "Directory Entry: %#x", i + idx_offset);
12304 }
12305 else
12306 {
12307 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12308 for (i = 1 - idx_offset; i < ndirs; i++)
12309 {
12310 const char *str
12311 = ggc_alloc_string (dirs[i].path,
12312 dirs[i].length
12313 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12314 output_line_string (str_form, str, "Directory Entry",
12315 (unsigned) i + idx_offset);
12316 }
12317 }
12318 }
12319 else
12320 {
12321 for (i = 1 - idx_offset; i < ndirs; i++)
12322 dw2_asm_output_nstring (dirs[i].path,
12323 dirs[i].length
12324 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12325 "Directory Entry: %#x", i + idx_offset);
12326
12327 dw2_asm_output_data (1, 0, "End directory table");
12328 }
12329
12330 /* We have to emit them in the order of emitted_number since that's
12331 used in the debug info generation. To do this efficiently we
12332 generate a back-mapping of the indices first. */
12333 backmap = XALLOCAVEC (int, numfiles);
12334 for (i = 0; i < numfiles; i++)
12335 backmap[files[i].file_idx->emitted_number - 1] = i;
12336
12337 if (dwarf_version >= 5)
12338 {
12339 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12340 if (filename0 == NULL)
12341 filename0 = "";
12342 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12343 DW_FORM_data2. Choose one based on the number of directories
12344 and how much space would they occupy in each encoding.
12345 If we have at most 256 directories, all indexes fit into
12346 a single byte, so DW_FORM_data1 is most compact (if there
12347 are at most 128 directories, DW_FORM_udata would be as
12348 	 compact as that, but not shorter, and slower to decode).  */
12349 if (ndirs + idx_offset <= 256)
12350 idx_form = DW_FORM_data1;
12351 /* If there are more than 65536 directories, we have to use
12352 	 DW_FORM_udata, as DW_FORM_data2 can't refer to them.
12353 	 Otherwise, compute how much space all the indexes would occupy if
12354 	 they used DW_FORM_udata (SUM), compare that to the size of the
12355 	 DW_FORM_data2 encoding, and pick the more efficient one.  */
12356 else if (ndirs + idx_offset <= 65536)
12357 {
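	  /* SUM starts at 1 to account for file entry 0, whose directory
	     index 0 takes a single uleb128 byte; a DW_FORM_data2 index
	     would take 2 bytes per entry, hence the comparison against
	     2 * (numfiles + 1) below.  */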
12358 unsigned HOST_WIDE_INT sum = 1;
12359 for (i = 0; i < numfiles; i++)
12360 {
12361 int file_idx = backmap[i];
12362 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12363 sum += size_of_uleb128 (dir_idx);
12364 }
12365 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12366 idx_form = DW_FORM_data2;
12367 }
12368 #ifdef VMS_DEBUGGING_INFO
12369 dw2_asm_output_data (1, 4, "File name entry format count");
12370 #else
12371 dw2_asm_output_data (1, 2, "File name entry format count");
12372 #endif
12373 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12374 dw2_asm_output_data_uleb128 (str_form, "%s",
12375 get_DW_FORM_name (str_form));
12376 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12377 "DW_LNCT_directory_index");
12378 dw2_asm_output_data_uleb128 (idx_form, "%s",
12379 get_DW_FORM_name (idx_form));
12380 #ifdef VMS_DEBUGGING_INFO
12381 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12382 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12383 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12384 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12385 #endif
12386 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12387
12388 output_line_string (str_form, filename0, "File Entry", 0);
12389
12390 /* Include directory index. */
12391 if (idx_form != DW_FORM_udata)
12392 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12393 0, NULL);
12394 else
12395 dw2_asm_output_data_uleb128 (0, NULL);
12396
12397 #ifdef VMS_DEBUGGING_INFO
12398 dw2_asm_output_data_uleb128 (0, NULL);
12399 dw2_asm_output_data_uleb128 (0, NULL);
12400 #endif
12401 }
12402
12403 /* Now write all the file names. */
12404 for (i = 0; i < numfiles; i++)
12405 {
12406 int file_idx = backmap[i];
12407 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12408
12409 #ifdef VMS_DEBUGGING_INFO
12410 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12411
12412 /* Setting these fields can lead to debugger miscomparisons,
12413 but VMS Debug requires them to be set correctly. */
12414
12415 int ver;
12416 long long cdt;
12417 long siz;
12418 int maxfilelen = (strlen (files[file_idx].path)
12419 + dirs[dir_idx].length
12420 + MAX_VMS_VERSION_LEN + 1);
12421 char *filebuf = XALLOCAVEC (char, maxfilelen);
12422
12423 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12424 snprintf (filebuf, maxfilelen, "%s;%d",
12425 files[file_idx].path + dirs[dir_idx].length, ver);
12426
12427 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12428
12429 /* Include directory index. */
12430 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12431 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12432 dir_idx + idx_offset, NULL);
12433 else
12434 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12435
12436 /* Modification time. */
12437 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12438 &cdt, 0, 0, 0) == 0)
12439 ? cdt : 0, NULL);
12440
12441 /* File length in bytes. */
12442 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12443 0, &siz, 0, 0) == 0)
12444 ? siz : 0, NULL);
12445 #else
12446 output_line_string (str_form,
12447 files[file_idx].path + dirs[dir_idx].length,
12448 "File Entry", (unsigned) i + 1);
12449
12450 /* Include directory index. */
12451 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12452 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12453 dir_idx + idx_offset, NULL);
12454 else
12455 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12456
12457 if (dwarf_version >= 5)
12458 continue;
12459
12460 /* Modification time. */
12461 dw2_asm_output_data_uleb128 (0, NULL);
12462
12463 /* File length in bytes. */
12464 dw2_asm_output_data_uleb128 (0, NULL);
12465 #endif /* VMS_DEBUGGING_INFO */
12466 }
12467
12468 if (dwarf_version < 5)
12469 dw2_asm_output_data (1, 0, "End file name table");
12470 }
12471
12472
12473 /* Output one line number table into the .debug_line section. */
12474
12475 static void
12476 output_one_line_info_table (dw_line_info_table *table)
12477 {
12478 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12479 unsigned int current_line = 1;
12480 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12481 dw_line_info_entry *ent, *prev_addr;
12482 size_t i;
12483 unsigned int view;
12484
12485 view = 0;
12486
12487 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12488 {
12489 switch (ent->opcode)
12490 {
12491 case LI_set_address:
12492 /* ??? Unfortunately, we have little choice here currently, and
12493 must always use the most general form. GCC does not know the
12494 address delta itself, so we can't use DW_LNS_advance_pc. Many
12495 ports do have length attributes which will give an upper bound
12496 on the address range. We could perhaps use length attributes
12497 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12498 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12499
12500 view = 0;
12501
12502 /* This can handle any delta. This takes
12503 4+DWARF2_ADDR_SIZE bytes. */
12504 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12505 debug_variable_location_views
12506 ? ", reset view to 0" : "");
12507 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12508 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12509 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12510
12511 prev_addr = ent;
12512 break;
12513
12514 case LI_adv_address:
12515 {
12516 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12517 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12518 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12519
12520 view++;
12521
12522 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12523 dw2_asm_output_delta (2, line_label, prev_label,
12524 "from %s to %s", prev_label, line_label);
12525
12526 prev_addr = ent;
12527 break;
12528 }
12529
12530 case LI_set_line:
12531 if (ent->val == current_line)
12532 {
12533 /* We still need to start a new row, so output a copy insn. */
12534 dw2_asm_output_data (1, DW_LNS_copy,
12535 "copy line %u", current_line);
12536 }
12537 else
12538 {
12539 int line_offset = ent->val - current_line;
12540 int line_delta = line_offset - DWARF_LINE_BASE;
12541
12542 current_line = ent->val;
12543 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12544 {
12545 /* This can handle deltas from -10 to 234, using the current
12546 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12547 This takes 1 byte. */
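		  /* For instance (illustrative, assuming DWARF_LINE_BASE is
		     -10 as the delta range above implies): advancing by one
		     source line gives line_offset == 1, hence
		     line_delta == 11, and the single special opcode
		     DWARF_LINE_OPCODE_BASE + 11 both applies the line
		     advance and appends the new row.  */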
12548 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12549 "line %u", current_line);
12550 }
12551 else
12552 {
12553 /* This can handle any delta. This takes at least 4 bytes,
12554 depending on the value being encoded. */
12555 dw2_asm_output_data (1, DW_LNS_advance_line,
12556 "advance to line %u", current_line);
12557 dw2_asm_output_data_sleb128 (line_offset, NULL);
12558 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12559 }
12560 }
12561 break;
12562
12563 case LI_set_file:
12564 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12565 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12566 break;
12567
12568 case LI_set_column:
12569 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12570 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12571 break;
12572
12573 case LI_negate_stmt:
12574 current_is_stmt = !current_is_stmt;
12575 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12576 "is_stmt %d", current_is_stmt);
12577 break;
12578
12579 case LI_set_prologue_end:
12580 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12581 "set prologue end");
12582 break;
12583
12584 case LI_set_epilogue_begin:
12585 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12586 "set epilogue begin");
12587 break;
12588
12589 case LI_set_discriminator:
12590 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12591 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12592 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12593 dw2_asm_output_data_uleb128 (ent->val, NULL);
12594 break;
12595 }
12596 }
12597
12598 /* Emit debug info for the address of the end of the table. */
12599 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12600 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12601 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12602 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12603
12604 dw2_asm_output_data (1, 0, "end sequence");
12605 dw2_asm_output_data_uleb128 (1, NULL);
12606 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12607 }
12608
12609 /* Output the source line number correspondence information. This
12610 information goes into the .debug_line section. */
12611
12612 static void
12613 output_line_info (bool prologue_only)
12614 {
12615 static unsigned int generation;
12616 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12617 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12618 bool saw_one = false;
12619 int opc;
12620
12621 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12622 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12623 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12624 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12625
12626 if (!XCOFF_DEBUGGING_INFO)
12627 {
12628 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12629 dw2_asm_output_data (4, 0xffffffff,
12630 "Initial length escape value indicating 64-bit DWARF extension");
12631 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12632 "Length of Source Line Info");
12633 }
12634
12635 ASM_OUTPUT_LABEL (asm_out_file, l1);
12636
12637 output_dwarf_version ();
12638 if (dwarf_version >= 5)
12639 {
12640 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12641 dw2_asm_output_data (1, 0, "Segment Size");
12642 }
12643 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12644 ASM_OUTPUT_LABEL (asm_out_file, p1);
12645
12646 /* Define the architecture-dependent minimum instruction length (in bytes).
12647 In this implementation of DWARF, this field is used for information
12648 purposes only. Since GCC generates assembly language, we have no
12649 a priori knowledge of how many instruction bytes are generated for each
12650 source line, and therefore can use only the DW_LNE_set_address and
12651 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12652 this as '1', which is "correct enough" for all architectures,
12653 and don't let the target override. */
12654 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12655
12656 if (dwarf_version >= 4)
12657 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12658 "Maximum Operations Per Instruction");
12659 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12660 "Default is_stmt_start flag");
12661 dw2_asm_output_data (1, DWARF_LINE_BASE,
12662 "Line Base Value (Special Opcodes)");
12663 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12664 "Line Range Value (Special Opcodes)");
12665 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12666 "Special Opcode Base");
12667
12668 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12669 {
12670 int n_op_args;
12671 switch (opc)
12672 {
12673 case DW_LNS_advance_pc:
12674 case DW_LNS_advance_line:
12675 case DW_LNS_set_file:
12676 case DW_LNS_set_column:
12677 case DW_LNS_fixed_advance_pc:
12678 case DW_LNS_set_isa:
12679 n_op_args = 1;
12680 break;
12681 default:
12682 n_op_args = 0;
12683 break;
12684 }
12685
12686 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12687 opc, n_op_args);
12688 }
12689
12690 /* Write out the information about the files we use. */
12691 output_file_names ();
12692 ASM_OUTPUT_LABEL (asm_out_file, p2);
12693 if (prologue_only)
12694 {
12695 /* Output the marker for the end of the line number info. */
12696 ASM_OUTPUT_LABEL (asm_out_file, l2);
12697 return;
12698 }
12699
12700 if (separate_line_info)
12701 {
12702 dw_line_info_table *table;
12703 size_t i;
12704
12705 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12706 if (table->in_use)
12707 {
12708 output_one_line_info_table (table);
12709 saw_one = true;
12710 }
12711 }
12712 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12713 {
12714 output_one_line_info_table (cold_text_section_line_info);
12715 saw_one = true;
12716 }
12717
12718 /* ??? Some Darwin linkers crash on a .debug_line section with no
12719 sequences. Further, merely a DW_LNE_end_sequence entry is not
12720 sufficient -- the address column must also be initialized.
12721 Make sure to output at least one set_address/end_sequence pair,
12722 choosing .text since that section is always present. */
12723 if (text_section_line_info->in_use || !saw_one)
12724 output_one_line_info_table (text_section_line_info);
12725
12726 /* Output the marker for the end of the line number info. */
12727 ASM_OUTPUT_LABEL (asm_out_file, l2);
12728 }
12729 \f
12730 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12731
12732 static inline bool
12733 need_endianity_attribute_p (bool reverse)
12734 {
12735 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12736 }
12737
12738 /* Given a pointer to a tree node for some base type, return a pointer to
12739 a DIE that describes the given type. REVERSE is true if the type is
12740 to be interpreted in the reverse storage order wrt the target order.
12741
12742 This routine must only be called for GCC type nodes that correspond to
12743 Dwarf base (fundamental) types. */
12744
12745 static dw_die_ref
12746 base_type_die (tree type, bool reverse)
12747 {
12748 dw_die_ref base_type_result;
12749 enum dwarf_type encoding;
12750 bool fpt_used = false;
12751 struct fixed_point_type_info fpt_info;
12752 tree type_bias = NULL_TREE;
12753
12754 /* If this is a subtype that should not be emitted as a subrange type,
12755 use the base type. See subrange_type_for_debug_p. */
12756 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12757 type = TREE_TYPE (type);
12758
12759 switch (TREE_CODE (type))
12760 {
12761 case INTEGER_TYPE:
12762 if ((dwarf_version >= 4 || !dwarf_strict)
12763 && TYPE_NAME (type)
12764 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12765 && DECL_IS_BUILTIN (TYPE_NAME (type))
12766 && DECL_NAME (TYPE_NAME (type)))
12767 {
12768 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12769 if (strcmp (name, "char16_t") == 0
12770 || strcmp (name, "char32_t") == 0)
12771 {
12772 encoding = DW_ATE_UTF;
12773 break;
12774 }
12775 }
12776 if ((dwarf_version >= 3 || !dwarf_strict)
12777 && lang_hooks.types.get_fixed_point_type_info)
12778 {
12779 memset (&fpt_info, 0, sizeof (fpt_info));
12780 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12781 {
12782 fpt_used = true;
12783 encoding = ((TYPE_UNSIGNED (type))
12784 ? DW_ATE_unsigned_fixed
12785 : DW_ATE_signed_fixed);
12786 break;
12787 }
12788 }
12789 if (TYPE_STRING_FLAG (type))
12790 {
12791 if (TYPE_UNSIGNED (type))
12792 encoding = DW_ATE_unsigned_char;
12793 else
12794 encoding = DW_ATE_signed_char;
12795 }
12796 else if (TYPE_UNSIGNED (type))
12797 encoding = DW_ATE_unsigned;
12798 else
12799 encoding = DW_ATE_signed;
12800
12801 if (!dwarf_strict
12802 && lang_hooks.types.get_type_bias)
12803 type_bias = lang_hooks.types.get_type_bias (type);
12804 break;
12805
12806 case REAL_TYPE:
12807 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12808 {
12809 if (dwarf_version >= 3 || !dwarf_strict)
12810 encoding = DW_ATE_decimal_float;
12811 else
12812 encoding = DW_ATE_lo_user;
12813 }
12814 else
12815 encoding = DW_ATE_float;
12816 break;
12817
12818 case FIXED_POINT_TYPE:
12819 if (!(dwarf_version >= 3 || !dwarf_strict))
12820 encoding = DW_ATE_lo_user;
12821 else if (TYPE_UNSIGNED (type))
12822 encoding = DW_ATE_unsigned_fixed;
12823 else
12824 encoding = DW_ATE_signed_fixed;
12825 break;
12826
12827 /* Dwarf2 doesn't know anything about complex ints, so use
12828 a user defined type for it. */
12829 case COMPLEX_TYPE:
12830 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12831 encoding = DW_ATE_complex_float;
12832 else
12833 encoding = DW_ATE_lo_user;
12834 break;
12835
12836 case BOOLEAN_TYPE:
12837 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12838 encoding = DW_ATE_boolean;
12839 break;
12840
12841 default:
12842 /* No other TREE_CODEs are Dwarf fundamental types. */
12843 gcc_unreachable ();
12844 }
12845
12846 base_type_result = new_die_raw (DW_TAG_base_type);
12847
12848 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12849 int_size_in_bytes (type));
12850 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12851
12852 if (need_endianity_attribute_p (reverse))
12853 add_AT_unsigned (base_type_result, DW_AT_endianity,
12854 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12855
12856 add_alignment_attribute (base_type_result, type);
12857
12858 if (fpt_used)
12859 {
12860 switch (fpt_info.scale_factor_kind)
12861 {
12862 case fixed_point_scale_factor_binary:
12863 add_AT_int (base_type_result, DW_AT_binary_scale,
12864 fpt_info.scale_factor.binary);
12865 break;
12866
12867 case fixed_point_scale_factor_decimal:
12868 add_AT_int (base_type_result, DW_AT_decimal_scale,
12869 fpt_info.scale_factor.decimal);
12870 break;
12871
12872 case fixed_point_scale_factor_arbitrary:
12873 /* Arbitrary scale factors cannot be described in standard DWARF,
12874 yet. */
12875 if (!dwarf_strict)
12876 {
12877 /* Describe the scale factor as a rational constant. */
12878 const dw_die_ref scale_factor
12879 = new_die (DW_TAG_constant, comp_unit_die (), type);
12880
12881 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12882 fpt_info.scale_factor.arbitrary.numerator);
12883 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12884 fpt_info.scale_factor.arbitrary.denominator);
12885
12886 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12887 }
12888 break;
12889
12890 default:
12891 gcc_unreachable ();
12892 }
12893 }
12894
12895 if (type_bias)
12896 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12897 dw_scalar_form_constant
12898 | dw_scalar_form_exprloc
12899 | dw_scalar_form_reference,
12900 NULL);
12901
12902 return base_type_result;
12903 }
12904
12905 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12906 named 'auto' in its type: return true for it, false otherwise. */
12907
12908 static inline bool
12909 is_cxx_auto (tree type)
12910 {
12911 if (is_cxx ())
12912 {
12913 tree name = TYPE_IDENTIFIER (type);
12914 if (name == get_identifier ("auto")
12915 || name == get_identifier ("decltype(auto)"))
12916 return true;
12917 }
12918 return false;
12919 }
12920
12921 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12922	   given input type is a Dwarf "fundamental" type.  Otherwise return zero.  */
12923
12924 static inline int
12925 is_base_type (tree type)
12926 {
12927 switch (TREE_CODE (type))
12928 {
12929 case INTEGER_TYPE:
12930 case REAL_TYPE:
12931 case FIXED_POINT_TYPE:
12932 case COMPLEX_TYPE:
12933 case BOOLEAN_TYPE:
12934 return 1;
12935
12936 case VOID_TYPE:
12937 case ARRAY_TYPE:
12938 case RECORD_TYPE:
12939 case UNION_TYPE:
12940 case QUAL_UNION_TYPE:
12941 case ENUMERAL_TYPE:
12942 case FUNCTION_TYPE:
12943 case METHOD_TYPE:
12944 case POINTER_TYPE:
12945 case REFERENCE_TYPE:
12946 case NULLPTR_TYPE:
12947 case OFFSET_TYPE:
12948 case LANG_TYPE:
12949 case VECTOR_TYPE:
12950 return 0;
12951
12952 default:
12953 if (is_cxx_auto (type))
12954 return 0;
12955 gcc_unreachable ();
12956 }
12957
12958 return 0;
12959 }
12960
12961 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12962 node, return the size in bits for the type if it is a constant, or else
12963 return the alignment for the type if the type's size is not constant, or
12964 else return BITS_PER_WORD if the type actually turns out to be an
12965 ERROR_MARK node. */
12966
12967 static inline unsigned HOST_WIDE_INT
12968 simple_type_size_in_bits (const_tree type)
12969 {
12970 if (TREE_CODE (type) == ERROR_MARK)
12971 return BITS_PER_WORD;
12972 else if (TYPE_SIZE (type) == NULL_TREE)
12973 return 0;
12974 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12975 return tree_to_uhwi (TYPE_SIZE (type));
12976 else
12977 return TYPE_ALIGN (type);
12978 }
12979
12980 /* Similarly, but return an offset_int instead of UHWI. */
12981
12982 static inline offset_int
12983 offset_int_type_size_in_bits (const_tree type)
12984 {
12985 if (TREE_CODE (type) == ERROR_MARK)
12986 return BITS_PER_WORD;
12987 else if (TYPE_SIZE (type) == NULL_TREE)
12988 return 0;
12989 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12990 return wi::to_offset (TYPE_SIZE (type));
12991 else
12992 return TYPE_ALIGN (type);
12993 }
12994
12995 /* Given a pointer to a tree node for a subrange type, return a pointer
12996 to a DIE that describes the given type. */
12997
12998 static dw_die_ref
12999 subrange_type_die (tree type, tree low, tree high, tree bias,
13000 dw_die_ref context_die)
13001 {
13002 dw_die_ref subrange_die;
13003 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
13004
13005 if (context_die == NULL)
13006 context_die = comp_unit_die ();
13007
13008 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
13009
13010 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
13011 {
13012 /* The size of the subrange type and its base type do not match,
13013 so we need to generate a size attribute for the subrange type. */
13014 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13015 }
13016
13017 add_alignment_attribute (subrange_die, type);
13018
13019 if (low)
13020 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13021 if (high)
13022 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13023 if (bias && !dwarf_strict)
13024 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13025 dw_scalar_form_constant
13026 | dw_scalar_form_exprloc
13027 | dw_scalar_form_reference,
13028 NULL);
13029
13030 return subrange_die;
13031 }
13032
13033 /* Returns the (const and/or volatile) cv_qualifiers associated with
13034 the decl node. This will normally be augmented with the
13035 cv_qualifiers of the underlying type in add_type_attribute. */
13036
13037 static int
13038 decl_quals (const_tree decl)
13039 {
13040 return ((TREE_READONLY (decl)
13041 /* The C++ front-end correctly marks reference-typed
13042 variables as readonly, but from a language (and debug
13043 info) standpoint they are not const-qualified. */
13044 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13045 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13046 | (TREE_THIS_VOLATILE (decl)
13047 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13048 }
13049
13050 /* Determine the TYPE whose qualifiers match the largest strict subset
13051 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13052 qualifiers outside QUAL_MASK. */
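/* Illustrative example: with TYPE_QUALS == (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE),
   a const-only variant of TYPE (if one exists and matches check_base_type)
   yields a return value of TYPE_QUAL_CONST; TYPE_QUALS itself is never
   returned, since only strict subsets qualify.  */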
13053
13054 static int
13055 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13056 {
13057 tree t;
13058 int best_rank = 0, best_qual = 0, max_rank;
13059
13060 type_quals &= qual_mask;
13061 max_rank = popcount_hwi (type_quals) - 1;
13062
13063 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13064 t = TYPE_NEXT_VARIANT (t))
13065 {
13066 int q = TYPE_QUALS (t) & qual_mask;
13067
13068 if ((q & type_quals) == q && q != type_quals
13069 && check_base_type (t, type))
13070 {
13071 int rank = popcount_hwi (q);
13072
13073 if (rank > best_rank)
13074 {
13075 best_rank = rank;
13076 best_qual = q;
13077 }
13078 }
13079 }
13080
13081 return best_qual;
13082 }
13083
13084 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13085 static const dwarf_qual_info_t dwarf_qual_info[] =
13086 {
13087 { TYPE_QUAL_CONST, DW_TAG_const_type },
13088 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13089 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13090 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13091 };
13092 static const unsigned int dwarf_qual_info_size
13093 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13094
13095 /* If DIE is a qualified DIE of some base DIE with the same parent,
13096 return the base DIE, otherwise return NULL. Set MASK to the
13097 qualifiers added compared to the returned DIE. */
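/* Sketch of the expected shape: for a DW_TAG_const_type DIE whose only
   attribute is a DW_AT_type pointing at a sibling DW_TAG_volatile_type DIE,
   which in turn refers to a sibling "int" base type DIE, a call with a
   nonzero DEPTH returns the "int" DIE and ORs
   TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE into MASK.  */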
13098
13099 static dw_die_ref
13100 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13101 {
13102 unsigned int i;
13103 for (i = 0; i < dwarf_qual_info_size; i++)
13104 if (die->die_tag == dwarf_qual_info[i].t)
13105 break;
13106 if (i == dwarf_qual_info_size)
13107 return NULL;
13108 if (vec_safe_length (die->die_attr) != 1)
13109 return NULL;
13110 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13111 if (type == NULL || type->die_parent != die->die_parent)
13112 return NULL;
13113 *mask |= dwarf_qual_info[i].q;
13114 if (depth)
13115 {
13116 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13117 if (ret)
13118 return ret;
13119 }
13120 return type;
13121 }
13122
13123 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13124 entry that chains the modifiers specified by CV_QUALS in front of the
13125 given type. REVERSE is true if the type is to be interpreted in the
13126 reverse storage order wrt the target order. */
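/* A simple instance of what follows (sketch): for a "const int" with
   CV_QUALS == TYPE_QUAL_CONST, a DW_TAG_const_type DIE is created whose
   DW_AT_type attribute refers to the DIE for the plain "int" base type.  */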
13127
13128 static dw_die_ref
13129 modified_type_die (tree type, int cv_quals, bool reverse,
13130 dw_die_ref context_die)
13131 {
13132 enum tree_code code = TREE_CODE (type);
13133 dw_die_ref mod_type_die;
13134 dw_die_ref sub_die = NULL;
13135 tree item_type = NULL;
13136 tree qualified_type;
13137 tree name, low, high;
13138 dw_die_ref mod_scope;
13139 /* Only these cv-qualifiers are currently handled. */
13140 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13141 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13142 ENCODE_QUAL_ADDR_SPACE(~0U));
13143 const bool reverse_base_type
13144 = need_endianity_attribute_p (reverse) && is_base_type (type);
13145
13146 if (code == ERROR_MARK)
13147 return NULL;
13148
13149 if (lang_hooks.types.get_debug_type)
13150 {
13151 tree debug_type = lang_hooks.types.get_debug_type (type);
13152
13153 if (debug_type != NULL_TREE && debug_type != type)
13154 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13155 }
13156
13157 cv_quals &= cv_qual_mask;
13158
13159	  /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13160	     tag modifier (and not an attribute) that old consumers won't be
13161	     able to handle.  */
13162 if (dwarf_version < 3)
13163 cv_quals &= ~TYPE_QUAL_RESTRICT;
13164
13165 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13166 if (dwarf_version < 5)
13167 cv_quals &= ~TYPE_QUAL_ATOMIC;
13168
13169 /* See if we already have the appropriately qualified variant of
13170 this type. */
13171 qualified_type = get_qualified_type (type, cv_quals);
13172
13173 if (qualified_type == sizetype)
13174 {
13175 /* Try not to expose the internal sizetype type's name. */
13176 if (TYPE_NAME (qualified_type)
13177 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13178 {
13179 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13180
13181 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13182 && (TYPE_PRECISION (t)
13183 == TYPE_PRECISION (qualified_type))
13184 && (TYPE_UNSIGNED (t)
13185 == TYPE_UNSIGNED (qualified_type)));
13186 qualified_type = t;
13187 }
13188 else if (qualified_type == sizetype
13189 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13190 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13191 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13192 qualified_type = size_type_node;
13193 }
13194
13195 /* If we do, then we can just use its DIE, if it exists. */
13196 if (qualified_type)
13197 {
13198 mod_type_die = lookup_type_die (qualified_type);
13199
13200 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13201 dealt with specially: the DIE with the attribute, if it exists, is
13202 placed immediately after the regular DIE for the same base type. */
13203 if (mod_type_die
13204 && (!reverse_base_type
13205 || ((mod_type_die = mod_type_die->die_sib) != NULL
13206 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13207 return mod_type_die;
13208 }
13209
13210 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13211
13212 /* Handle C typedef types. */
13213 if (name
13214 && TREE_CODE (name) == TYPE_DECL
13215 && DECL_ORIGINAL_TYPE (name)
13216 && !DECL_ARTIFICIAL (name))
13217 {
13218 tree dtype = TREE_TYPE (name);
13219
13220 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13221 if (qualified_type == dtype && !reverse_base_type)
13222 {
13223 tree origin = decl_ultimate_origin (name);
13224
13225 /* Typedef variants that have an abstract origin don't get their own
13226 type DIE (see gen_typedef_die), so fall back on the ultimate
13227 abstract origin instead. */
13228 if (origin != NULL && origin != name)
13229 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13230 context_die);
13231
13232 /* For a named type, use the typedef. */
13233 gen_type_die (qualified_type, context_die);
13234 return lookup_type_die (qualified_type);
13235 }
13236 else
13237 {
13238 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13239 dquals &= cv_qual_mask;
13240 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13241 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13242 /* cv-unqualified version of named type. Just use
13243 the unnamed type to which it refers. */
13244 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13245 reverse, context_die);
13246 /* Else cv-qualified version of named type; fall through. */
13247 }
13248 }
13249
13250 mod_scope = scope_die_for (type, context_die);
13251
13252 if (cv_quals)
13253 {
13254 int sub_quals = 0, first_quals = 0;
13255 unsigned i;
13256 dw_die_ref first = NULL, last = NULL;
13257
13258 /* Determine a lesser qualified type that most closely matches
13259 this one. Then generate DW_TAG_* entries for the remaining
13260 qualifiers. */
13261 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13262 cv_qual_mask);
13263 if (sub_quals && use_debug_types)
13264 {
13265 bool needed = false;
13266 /* If emitting type units, make sure the order of qualifiers
13267 is canonical. Thus, start from unqualified type if
13268 an earlier qualifier is missing in sub_quals, but some later
13269 one is present there. */
13270 for (i = 0; i < dwarf_qual_info_size; i++)
13271 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13272 needed = true;
13273 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13274 {
13275 sub_quals = 0;
13276 break;
13277 }
13278 }
13279 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13280 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13281 {
13282 /* As not all intermediate qualified DIEs have corresponding
13283 tree types, ensure that qualified DIEs in the same scope
13284 as their DW_AT_type are emitted after their DW_AT_type,
13285 only with other qualified DIEs for the same type possibly
13286 in between them. Determine the range of such qualified
13287 DIEs now (first being the base type, last being corresponding
13288 last qualified DIE for it). */
13289 unsigned int count = 0;
13290 first = qualified_die_p (mod_type_die, &first_quals,
13291 dwarf_qual_info_size);
13292 if (first == NULL)
13293 first = mod_type_die;
13294 gcc_assert ((first_quals & ~sub_quals) == 0);
13295 for (count = 0, last = first;
13296 count < (1U << dwarf_qual_info_size);
13297 count++, last = last->die_sib)
13298 {
13299 int quals = 0;
13300 if (last == mod_scope->die_child)
13301 break;
13302 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13303 != first)
13304 break;
13305 }
13306 }
13307
13308 for (i = 0; i < dwarf_qual_info_size; i++)
13309 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13310 {
13311 dw_die_ref d;
13312 if (first && first != last)
13313 {
13314 for (d = first->die_sib; ; d = d->die_sib)
13315 {
13316 int quals = 0;
13317 qualified_die_p (d, &quals, dwarf_qual_info_size);
13318 if (quals == (first_quals | dwarf_qual_info[i].q))
13319 break;
13320 if (d == last)
13321 {
13322 d = NULL;
13323 break;
13324 }
13325 }
13326 if (d)
13327 {
13328 mod_type_die = d;
13329 continue;
13330 }
13331 }
13332 if (first)
13333 {
13334 d = new_die_raw (dwarf_qual_info[i].t);
13335 add_child_die_after (mod_scope, d, last);
13336 last = d;
13337 }
13338 else
13339 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13340 if (mod_type_die)
13341 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13342 mod_type_die = d;
13343 first_quals |= dwarf_qual_info[i].q;
13344 }
13345 }
13346 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13347 {
13348 dwarf_tag tag = DW_TAG_pointer_type;
13349 if (code == REFERENCE_TYPE)
13350 {
13351 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13352 tag = DW_TAG_rvalue_reference_type;
13353 else
13354 tag = DW_TAG_reference_type;
13355 }
13356 mod_type_die = new_die (tag, mod_scope, type);
13357
13358 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13359 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13360 add_alignment_attribute (mod_type_die, type);
13361 item_type = TREE_TYPE (type);
13362
13363 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13364 if (!ADDR_SPACE_GENERIC_P (as))
13365 {
13366 int action = targetm.addr_space.debug (as);
13367 if (action >= 0)
13368 {
13369 /* Positive values indicate an address_class. */
13370 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13371 }
13372 else
13373 {
13374 /* Negative values indicate an (inverted) segment base reg. */
13375 dw_loc_descr_ref d
13376 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13377 add_AT_loc (mod_type_die, DW_AT_segment, d);
13378 }
13379 }
13380 }
13381 else if (code == INTEGER_TYPE
13382 && TREE_TYPE (type) != NULL_TREE
13383 && subrange_type_for_debug_p (type, &low, &high))
13384 {
13385 tree bias = NULL_TREE;
13386 if (lang_hooks.types.get_type_bias)
13387 bias = lang_hooks.types.get_type_bias (type);
13388 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13389 item_type = TREE_TYPE (type);
13390 }
13391 else if (is_base_type (type))
13392 {
13393 mod_type_die = base_type_die (type, reverse);
13394
13395 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13396 if (reverse_base_type)
13397 {
13398 dw_die_ref after_die
13399 = modified_type_die (type, cv_quals, false, context_die);
13400 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13401 }
13402 else
13403 add_child_die (comp_unit_die (), mod_type_die);
13404
13405 add_pubtype (type, mod_type_die);
13406 }
13407 else
13408 {
13409 gen_type_die (type, context_die);
13410
13411 /* We have to get the type_main_variant here (and pass that to the
13412 `lookup_type_die' routine) because the ..._TYPE node we have
13413 might simply be a *copy* of some original type node (where the
13414 copy was created to help us keep track of typedef names) and
13415 that copy might have a different TYPE_UID from the original
13416 ..._TYPE node. */
13417 if (TREE_CODE (type) == FUNCTION_TYPE
13418 || TREE_CODE (type) == METHOD_TYPE)
13419 {
13420 /* For function/method types, can't just use type_main_variant here,
13421 because that can have different ref-qualifiers for C++,
13422 but try to canonicalize. */
13423 tree main = TYPE_MAIN_VARIANT (type);
13424 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13425 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13426 && check_base_type (t, main)
13427 && check_lang_type (t, type))
13428 return lookup_type_die (t);
13429 return lookup_type_die (type);
13430 }
13431 else if (TREE_CODE (type) != VECTOR_TYPE
13432 && TREE_CODE (type) != ARRAY_TYPE)
13433 return lookup_type_die (type_main_variant (type));
13434 else
13435 /* Vectors have the debugging information in the type,
13436 not the main variant. */
13437 return lookup_type_die (type);
13438 }
13439
13440 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13441 don't output a DW_TAG_typedef, since there isn't one in the
13442 user's program; just attach a DW_AT_name to the type.
13443 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13444 if the base type already has the same name. */
13445 if (name
13446 && ((TREE_CODE (name) != TYPE_DECL
13447 && (qualified_type == TYPE_MAIN_VARIANT (type)
13448 || (cv_quals == TYPE_UNQUALIFIED)))
13449 || (TREE_CODE (name) == TYPE_DECL
13450 && TREE_TYPE (name) == qualified_type
13451 && DECL_NAME (name))))
13452 {
13453 if (TREE_CODE (name) == TYPE_DECL)
13454 /* Could just call add_name_and_src_coords_attributes here,
13455 but since this is a builtin type it doesn't have any
13456 useful source coordinates anyway. */
13457 name = DECL_NAME (name);
13458 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13459 }
13460 /* This probably indicates a bug. */
13461 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13462 {
13463 name = TYPE_IDENTIFIER (type);
13464 add_name_attribute (mod_type_die,
13465 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13466 }
13467
13468 if (qualified_type && !reverse_base_type)
13469 equate_type_number_to_die (qualified_type, mod_type_die);
13470
13471 if (item_type)
13472 /* We must do this after the equate_type_number_to_die call, in case
13473 this is a recursive type. This ensures that the modified_type_die
13474 recursion will terminate even if the type is recursive. Recursive
13475 types are possible in Ada. */
13476 sub_die = modified_type_die (item_type,
13477 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13478 reverse,
13479 context_die);
13480
13481 if (sub_die != NULL)
13482 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13483
13484 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13485 if (TYPE_ARTIFICIAL (type))
13486 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13487
13488 return mod_type_die;
13489 }
13490
13491 /* Generate DIEs for the generic parameters of T.
13492 T must be either a generic type or a generic function.
13493 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
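/* For example (C++, illustrative): for "template <typename T, int N> struct S",
   T gets a DW_TAG_template_type_param DIE and N a DW_TAG_template_value_param
   DIE whose DW_AT_const_value is filled in later, once cgraph information is
   available (see generic_parameter_die below).  */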
13494
13495 static void
13496 gen_generic_params_dies (tree t)
13497 {
13498 tree parms, args;
13499 int parms_num, i;
13500 dw_die_ref die = NULL;
13501 int non_default;
13502
13503 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13504 return;
13505
13506 if (TYPE_P (t))
13507 die = lookup_type_die (t);
13508 else if (DECL_P (t))
13509 die = lookup_decl_die (t);
13510
13511 gcc_assert (die);
13512
13513 parms = lang_hooks.get_innermost_generic_parms (t);
13514 if (!parms)
13515	    /* T has no generic parameter.  It means T is neither a generic type
13516	       nor a generic function.  End of story.  */
13517 return;
13518
13519 parms_num = TREE_VEC_LENGTH (parms);
13520 args = lang_hooks.get_innermost_generic_args (t);
13521 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13522 non_default = int_cst_value (TREE_CHAIN (args));
13523 else
13524 non_default = TREE_VEC_LENGTH (args);
13525 for (i = 0; i < parms_num; i++)
13526 {
13527 tree parm, arg, arg_pack_elems;
13528 dw_die_ref parm_die;
13529
13530 parm = TREE_VEC_ELT (parms, i);
13531 arg = TREE_VEC_ELT (args, i);
13532 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13533 gcc_assert (parm && TREE_VALUE (parm) && arg);
13534
13535 if (parm && TREE_VALUE (parm) && arg)
13536 {
13537 /* If PARM represents a template parameter pack,
13538 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13539 by DW_TAG_template_*_parameter DIEs for the argument
13540 pack elements of ARG. Note that ARG would then be
13541 an argument pack. */
13542 if (arg_pack_elems)
13543 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13544 arg_pack_elems,
13545 die);
13546 else
13547 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13548 true /* emit name */, die);
13549 if (i >= non_default)
13550 add_AT_flag (parm_die, DW_AT_default_value, 1);
13551 }
13552 }
13553 }
13554
13555 /* Create and return a DIE for PARM which should be
13556 the representation of a generic type parameter.
13557 For instance, in the C++ front end, PARM would be a template parameter.
13558 ARG is the argument to PARM.
13559	   EMIT_NAME_P, if true, means the DIE will have a DW_AT_name attribute
13560	   set to the name of PARM.
13561	   PARENT_DIE is the parent DIE to which the newly created DIE should be
13562	   added as a child node.  */
13563
13564 static dw_die_ref
13565 generic_parameter_die (tree parm, tree arg,
13566 bool emit_name_p,
13567 dw_die_ref parent_die)
13568 {
13569 dw_die_ref tmpl_die = NULL;
13570 const char *name = NULL;
13571
13572 if (!parm || !DECL_NAME (parm) || !arg)
13573 return NULL;
13574
13575 /* We support non-type generic parameters and arguments,
13576 type generic parameters and arguments, as well as
13577 generic generic parameters (a.k.a. template template parameters in C++)
13578 and arguments. */
13579 if (TREE_CODE (parm) == PARM_DECL)
13580 /* PARM is a nontype generic parameter */
13581 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13582 else if (TREE_CODE (parm) == TYPE_DECL)
13583 /* PARM is a type generic parameter. */
13584 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13585 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13586 /* PARM is a generic generic parameter.
13587 Its DIE is a GNU extension. It shall have a
13588 DW_AT_name attribute to represent the name of the template template
13589 parameter, and a DW_AT_GNU_template_name attribute to represent the
13590 name of the template template argument. */
13591 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13592 parent_die, parm);
13593 else
13594 gcc_unreachable ();
13595
13596 if (tmpl_die)
13597 {
13598 tree tmpl_type;
13599
13600 /* If PARM is a generic parameter pack, it means we are
13601 emitting debug info for a template argument pack element.
13602 In other terms, ARG is a template argument pack element.
13603 In that case, we don't emit any DW_AT_name attribute for
13604 the die. */
13605 if (emit_name_p)
13606 {
13607 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13608 gcc_assert (name);
13609 add_AT_string (tmpl_die, DW_AT_name, name);
13610 }
13611
13612 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13613 {
13614 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13615 TMPL_DIE should have a child DW_AT_type attribute that is set
13616 to the type of the argument to PARM, which is ARG.
13617 If PARM is a type generic parameter, TMPL_DIE should have a
13618 child DW_AT_type that is set to ARG. */
13619 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13620 add_type_attribute (tmpl_die, tmpl_type,
13621 (TREE_THIS_VOLATILE (tmpl_type)
13622 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13623 false, parent_die);
13624 }
13625 else
13626 {
13627	  /* So TMPL_DIE is a DIE representing a generic generic template
13628	     parameter, a.k.a. a template template parameter in C++,
13629	     and ARG is a template.  */
13630
13631 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13632 to the name of the argument. */
13633 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13634 if (name)
13635 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13636 }
13637
13638 if (TREE_CODE (parm) == PARM_DECL)
13639	/* So PARM is a non-type generic parameter.
13640	   DWARF3 5.6.8 says we must set a DW_AT_const_value child
13641	   attribute of TMPL_DIE whose value represents the value
13642	   of ARG.
13643	   We must be careful here:
13644	   the value of ARG might reference some function decls.
13645	   We might currently be emitting debug info for a generic
13646	   type and types are emitted before function decls; we won't
13647	   know whether the function decls referenced by ARG will
13648	   actually be emitted until cgraph computations are done.
13649	   So we must defer the generation of the DW_AT_const_value
13650	   to after cgraph is ready.  */
13651 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13652 }
13653
13654 return tmpl_die;
13655 }
13656
13657	/* Generate and return a DW_TAG_GNU_template_parameter_pack DIE
13658	   representing PARM_PACK.  PARM_PACK must be a template parameter pack.
13659	   The returned DIE will be a child DIE of PARENT_DIE.  */
13660
13661 static dw_die_ref
13662 template_parameter_pack_die (tree parm_pack,
13663 tree parm_pack_args,
13664 dw_die_ref parent_die)
13665 {
13666 dw_die_ref die;
13667 int j;
13668
13669 gcc_assert (parent_die && parm_pack);
13670
13671 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13672 add_name_and_src_coords_attributes (die, parm_pack);
13673 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13674 generic_parameter_die (parm_pack,
13675 TREE_VEC_ELT (parm_pack_args, j),
13676 false /* Don't emit DW_AT_name */,
13677 die);
13678 return die;
13679 }
13680
13681 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13682 an enumerated type. */
13683
13684 static inline int
13685 type_is_enum (const_tree type)
13686 {
13687 return TREE_CODE (type) == ENUMERAL_TYPE;
13688 }
13689
13690 /* Return the DBX register number described by a given RTL node. */
13691
13692 static unsigned int
13693 dbx_reg_number (const_rtx rtl)
13694 {
13695 unsigned regno = REGNO (rtl);
13696
13697 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13698
13699 #ifdef LEAF_REG_REMAP
13700 if (crtl->uses_only_leaf_regs)
13701 {
13702 int leaf_reg = LEAF_REG_REMAP (regno);
13703 if (leaf_reg != -1)
13704 regno = (unsigned) leaf_reg;
13705 }
13706 #endif
13707
13708 regno = DBX_REGISTER_NUMBER (regno);
13709 gcc_assert (regno != INVALID_REGNUM);
13710 return regno;
13711 }
13712
13713 /* Optionally add a DW_OP_piece term to a location description expression.
13714	   DW_OP_piece is only added if the location description expression
13715	   doesn't already end with DW_OP_piece.  */
13716
13717 static void
13718 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13719 {
13720 dw_loc_descr_ref loc;
13721
13722 if (*list_head != NULL)
13723 {
13724 /* Find the end of the chain. */
13725 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13726 ;
13727
13728 if (loc->dw_loc_opc != DW_OP_piece)
13729 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13730 }
13731 }
13732
13733 /* Return a location descriptor that designates a machine register or
13734 zero if there is none. */
13735
13736 static dw_loc_descr_ref
13737 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13738 {
13739 rtx regs;
13740
13741 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13742 return 0;
13743
13744 /* We only use "frame base" when we're sure we're talking about the
13745 post-prologue local stack frame. We do this by *not* running
13746 register elimination until this point, and recognizing the special
13747 argument pointer and soft frame pointer rtx's.
13748 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13749 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13750 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13751 {
13752 dw_loc_descr_ref result = NULL;
13753
13754 if (dwarf_version >= 4 || !dwarf_strict)
13755 {
13756 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13757 initialized);
13758 if (result)
13759 add_loc_descr (&result,
13760 new_loc_descr (DW_OP_stack_value, 0, 0));
13761 }
13762 return result;
13763 }
13764
13765 regs = targetm.dwarf_register_span (rtl);
13766
13767 if (REG_NREGS (rtl) > 1 || regs)
13768 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13769 else
13770 {
13771 unsigned int dbx_regnum = dbx_reg_number (rtl);
13772 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13773 return 0;
13774 return one_reg_loc_descriptor (dbx_regnum, initialized);
13775 }
13776 }
13777
13778 /* Return a location descriptor that designates a machine register for
13779 a given hard register number. */
13780
13781 static dw_loc_descr_ref
13782 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13783 {
13784 dw_loc_descr_ref reg_loc_descr;
13785
13786 if (regno <= 31)
13787 reg_loc_descr
13788 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13789 else
13790 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13791
13792 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13793 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13794
13795 return reg_loc_descr;
13796 }
13797
13798 /* Given an RTL of a register, return a location descriptor that
13799 designates a value that spans more than one register. */
13800
13801 static dw_loc_descr_ref
13802 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13803 enum var_init_status initialized)
13804 {
13805 int size, i;
13806 dw_loc_descr_ref loc_result = NULL;
13807
13808 /* Simple, contiguous registers. */
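  /* E.g. (illustrative): a 16-byte value spanning two consecutive 8-byte
     hard registers is described as
       DW_OP_reg<r> DW_OP_piece 8 DW_OP_reg<r+1> DW_OP_piece 8
     (or DW_OP_regx for larger register numbers).  */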
13809 if (regs == NULL_RTX)
13810 {
13811 unsigned reg = REGNO (rtl);
13812 int nregs;
13813
13814 #ifdef LEAF_REG_REMAP
13815 if (crtl->uses_only_leaf_regs)
13816 {
13817 int leaf_reg = LEAF_REG_REMAP (reg);
13818 if (leaf_reg != -1)
13819 reg = (unsigned) leaf_reg;
13820 }
13821 #endif
13822
13823 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13824 nregs = REG_NREGS (rtl);
13825
13826 /* At present we only track constant-sized pieces. */
13827 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13828 return NULL;
13829 size /= nregs;
13830
13831 loc_result = NULL;
13832 while (nregs--)
13833 {
13834 dw_loc_descr_ref t;
13835
13836 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13837 VAR_INIT_STATUS_INITIALIZED);
13838 add_loc_descr (&loc_result, t);
13839 add_loc_descr_op_piece (&loc_result, size);
13840 ++reg;
13841 }
13842 return loc_result;
13843 }
13844
13845	  /* Now onto stupid register sets in non-contiguous locations.  */
13846
13847 gcc_assert (GET_CODE (regs) == PARALLEL);
13848
13849 /* At present we only track constant-sized pieces. */
13850 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13851 return NULL;
13852 loc_result = NULL;
13853
13854 for (i = 0; i < XVECLEN (regs, 0); ++i)
13855 {
13856 dw_loc_descr_ref t;
13857
13858 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13859 VAR_INIT_STATUS_INITIALIZED);
13860 add_loc_descr (&loc_result, t);
13861 add_loc_descr_op_piece (&loc_result, size);
13862 }
13863
13864 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13865 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13866 return loc_result;
13867 }
13868
13869 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13870
13871 /* Return a location descriptor that designates a constant i,
13872 as a compound operation from constant (i >> shift), constant shift
13873 and DW_OP_shl. */
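/* Illustrative example: the constant 0x1c000000 can be emitted as
   DW_OP_lit28 DW_OP_lit24 DW_OP_shl (3 bytes) rather than as
   DW_OP_const4u 0x1c000000 (5 bytes).  */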
13874
13875 static dw_loc_descr_ref
13876 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13877 {
13878 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13879 add_loc_descr (&ret, int_loc_descriptor (shift));
13880 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13881 return ret;
13882 }
13883
13884 /* Return a location descriptor that designates constant POLY_I. */
13885
13886 static dw_loc_descr_ref
13887 int_loc_descriptor (poly_int64 poly_i)
13888 {
13889 enum dwarf_location_atom op;
13890
13891 HOST_WIDE_INT i;
13892 if (!poly_i.is_constant (&i))
13893 {
13894 /* Create location descriptions for the non-constant part and
13895 add any constant offset at the end. */
13896 dw_loc_descr_ref ret = NULL;
13897 HOST_WIDE_INT constant = poly_i.coeffs[0];
13898 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13899 {
13900 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13901 if (coeff != 0)
13902 {
13903 dw_loc_descr_ref start = ret;
13904 unsigned int factor;
13905 int bias;
13906 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13907 (j, &factor, &bias);
13908
13909 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13910 add COEFF * (REGNO / FACTOR) now and subtract
13911 COEFF * BIAS from the final constant part. */
13912 constant -= coeff * bias;
13913 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13914 if (coeff % factor == 0)
13915 coeff /= factor;
13916 else
13917 {
13918 int amount = exact_log2 (factor);
13919 gcc_assert (amount >= 0);
13920 add_loc_descr (&ret, int_loc_descriptor (amount));
13921 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13922 }
13923 if (coeff != 1)
13924 {
13925 add_loc_descr (&ret, int_loc_descriptor (coeff));
13926 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13927 }
13928 if (start)
13929 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13930 }
13931 }
13932 loc_descr_plus_const (&ret, constant);
13933 return ret;
13934 }
13935
13936 /* Pick the smallest representation of a constant, rather than just
13937 defaulting to the LEB encoding. */
13938 if (i >= 0)
13939 {
13940 int clz = clz_hwi (i);
13941 int ctz = ctz_hwi (i);
13942 if (i <= 31)
13943 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13944 else if (i <= 0xff)
13945 op = DW_OP_const1u;
13946 else if (i <= 0xffff)
13947 op = DW_OP_const2u;
13948 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13949 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13950 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13951 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13952 while DW_OP_const4u is 5 bytes. */
13953 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13954 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13955 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13956 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13957 while DW_OP_const4u is 5 bytes. */
13958 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13959
13960 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13961 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13962 <= 4)
13963 {
13964 /* As i >= 2**31, the double cast above will yield a negative number.
13965 Since wrapping is defined in DWARF expressions we can output big
13966 positive integers as small negative ones, regardless of the size
13967 of host wide ints.
13968
13969 Here, since the evaluator will handle 32-bit values and since i >=
13970 2**31, we know it's going to be interpreted as a negative literal:
13971 store it this way if we can do better than 5 bytes this way. */
13972 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13973 }
13974 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13975 op = DW_OP_const4u;
13976
13977 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13978 least 6 bytes: see if we can do better before falling back to it. */
13979 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13980 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13981 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13982 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13983 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13984 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13985 >= HOST_BITS_PER_WIDE_INT)
13986 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13987 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13988 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13989 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13990 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13991 && size_of_uleb128 (i) > 6)
13992 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13993 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13994 else
13995 op = DW_OP_constu;
13996 }
13997 else
13998 {
13999 if (i >= -0x80)
14000 op = DW_OP_const1s;
14001 else if (i >= -0x8000)
14002 op = DW_OP_const2s;
14003 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14004 {
14005 if (size_of_int_loc_descriptor (i) < 5)
14006 {
14007 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14008 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14009 return ret;
14010 }
14011 op = DW_OP_const4s;
14012 }
14013 else
14014 {
14015 if (size_of_int_loc_descriptor (i)
14016 < (unsigned long) 1 + size_of_sleb128 (i))
14017 {
14018 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14019 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14020 return ret;
14021 }
14022 op = DW_OP_consts;
14023 }
14024 }
14025
14026 return new_loc_descr (op, i, 0);
14027 }
14028
14029 /* Likewise, for unsigned constants. */
14030
14031 static dw_loc_descr_ref
14032 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14033 {
14034 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14035 const unsigned HOST_WIDE_INT max_uint
14036 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14037
14038 /* If possible, use the clever signed constants handling. */
14039 if (i <= max_int)
14040 return int_loc_descriptor ((HOST_WIDE_INT) i);
14041
14042 /* Here, we are left with positive numbers that cannot be represented as
14043 HOST_WIDE_INT, i.e.:
14044 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14045
14046	     Using a DW_OP_const4u/const8u operation to encode them consumes a lot
14047	     of bytes whereas it may be better to output a negative integer: thanks
14048	     to integer wrapping, we know that:
14049	       x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
14050	         = x - 2 * (max (HOST_WIDE_INT) + 1)
14051	     So numbers close to max (unsigned HOST_WIDE_INT) could be represented
14052	     as small negative integers.  Let's try that in cases where it will clearly improve
14053 the encoding: there is no gain turning DW_OP_const4u into
14054 DW_OP_const4s. */
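  /* Worked instance of the above (assuming DWARF2_ADDR_SIZE == 8 and a
     64-bit HOST_WIDE_INT): i == 0xffffffffffffffff passes the test below
     and gives second_shift == -1, which int_loc_descriptor emits as
     DW_OP_const1s -1 (2 bytes) instead of DW_OP_const8u (9 bytes).  */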
14055 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14056 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14057 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14058 {
14059 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14060
14061 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14062 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14063 const HOST_WIDE_INT second_shift
14064 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14065
14066 /* So we finally have:
14067 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14068 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14069 return int_loc_descriptor (second_shift);
14070 }
14071
14072	  /* Last chance: fall back to a simple constant operation.  */
14073 return new_loc_descr
14074 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14075 ? DW_OP_const4u
14076 : DW_OP_const8u,
14077 i, 0);
14078 }
14079
14080 /* Generate and return a location description that computes the unsigned
14081 comparison of the two stack top entries (a OP b where b is the top-most
14082 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14083 LE_EXPR, GT_EXPR or GE_EXPR. */
14084
14085 static dw_loc_descr_ref
14086 uint_comparison_loc_list (enum tree_code kind)
14087 {
14088 enum dwarf_location_atom op, flip_op;
14089 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14090
14091 switch (kind)
14092 {
14093 case LT_EXPR:
14094 op = DW_OP_lt;
14095 break;
14096 case LE_EXPR:
14097 op = DW_OP_le;
14098 break;
14099 case GT_EXPR:
14100 op = DW_OP_gt;
14101 break;
14102 case GE_EXPR:
14103 op = DW_OP_ge;
14104 break;
14105 default:
14106 gcc_unreachable ();
14107 }
14108
14109 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14110 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14111
14112 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14113 possible to perform unsigned comparisons: we just have to distinguish
14114 three cases:
14115
14116 1. when a and b have the same sign (as signed integers); then we should
14117 return: a OP(signed) b;
14118
14119 2. when a is a negative signed integer while b is a positive one, then a
14120 is a greater unsigned integer than b; likewise when a and b's roles
14121 are flipped.
14122
14123 So first, compare the sign of the two operands. */
14124 ret = new_loc_descr (DW_OP_over, 0, 0);
14125 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14126 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14127 /* If they have different signs (i.e. they have different sign bits), then
14128 the stack top value has now the sign bit set and thus it's smaller than
14129 zero. */
14130 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14131 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14132 add_loc_descr (&ret, bra_node);
14133
14134 /* We are in case 1. At this point, we know both operands have the same
14135 sign, so it's safe to use the built-in signed comparison. */
14136 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14137 add_loc_descr (&ret, jmp_node);
14138
14139 /* We are in case 2. Here, we know both operands do not have the same sign,
14140 so we have to flip the signed comparison. */
14141 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14142 tmp = new_loc_descr (flip_op, 0, 0);
14143 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14144 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14145 add_loc_descr (&ret, tmp);
14146
14147 /* This dummy operation is necessary to make the two branches join. */
14148 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14149 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14150 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14151 add_loc_descr (&ret, tmp);
14152
14153 return ret;
14154 }
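/* For illustration, for KIND == LT_EXPR the sequence built above is:
       DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra <L1>
       DW_OP_lt DW_OP_skip <L2>
   L1: DW_OP_gt
   L2: DW_OP_nop
   i.e. use the signed DW_OP_lt when the sign bits agree, and the flipped
   DW_OP_gt when they differ.  */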
14155
14156 /* Likewise, but takes the location description lists (might be destructive on
14157 them). Return NULL if either is NULL or if concatenation fails. */
14158
14159 static dw_loc_list_ref
14160 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14161 enum tree_code kind)
14162 {
14163 if (left == NULL || right == NULL)
14164 return NULL;
14165
14166 add_loc_list (&left, right);
14167 if (left == NULL)
14168 return NULL;
14169
14170 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14171 return left;
14172 }
14173
14174 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14175 without actually allocating it. */
14176
14177 static unsigned long
14178 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14179 {
14180 return size_of_int_loc_descriptor (i >> shift)
14181 + size_of_int_loc_descriptor (shift)
14182 + 1;
14183 }
14184
14185 /* Return size_of_locs (int_loc_descriptor (i)) without
14186 actually allocating it. */
14187
14188 static unsigned long
14189 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14190 {
14191 unsigned long s;
14192
14193 if (i >= 0)
14194 {
14195 int clz, ctz;
14196 if (i <= 31)
14197 return 1;
14198 else if (i <= 0xff)
14199 return 2;
14200 else if (i <= 0xffff)
14201 return 3;
14202 clz = clz_hwi (i);
14203 ctz = ctz_hwi (i);
14204 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14205 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14206 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14207 - clz - 5);
14208 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14209 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14210 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14211 - clz - 8);
14212 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14213 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14214 <= 4)
14215 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14216 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14217 return 5;
14218 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14219 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14220 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14221 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14222 - clz - 8);
14223 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14224 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14225 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14226 - clz - 16);
14227 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14228 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14229 && s > 6)
14230 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14231 - clz - 32);
14232 else
14233 return 1 + s;
14234 }
14235 else
14236 {
14237 if (i >= -0x80)
14238 return 2;
14239 else if (i >= -0x8000)
14240 return 3;
14241 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14242 {
14243 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14244 {
14245 s = size_of_int_loc_descriptor (-i) + 1;
14246 if (s < 5)
14247 return s;
14248 }
14249 return 5;
14250 }
14251 else
14252 {
14253 unsigned long r = 1 + size_of_sleb128 (i);
14254 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14255 {
14256 s = size_of_int_loc_descriptor (-i) + 1;
14257 if (s < r)
14258 return s;
14259 }
14260 return r;
14261 }
14262 }
14263 }
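/* Illustrative size computation, assuming a 64-bit HOST_WIDE_INT: for
   i == 0x10000, clz == 47 and ctz == 16, so the value can be emitted as
   "DW_OP_lit16 DW_OP_lit12 DW_OP_shl" (3 bytes) rather than
   "DW_OP_const4u 0x10000" (5 bytes), and this function returns 3.  */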
14264
14265 /* Return a location description representing the "address" of an integer
14266 value. This can appear only as a top-level expression. */
14267
14268 static dw_loc_descr_ref
14269 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14270 {
14271 int litsize;
14272 dw_loc_descr_ref loc_result = NULL;
14273
14274 if (!(dwarf_version >= 4 || !dwarf_strict))
14275 return NULL;
14276
14277 litsize = size_of_int_loc_descriptor (i);
14278 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14279 is more compact. For DW_OP_stack_value we need:
14280 litsize + 1 (DW_OP_stack_value)
14281 and for DW_OP_implicit_value:
14282 1 (DW_OP_implicit_value) + 1 (length) + size. */
14283 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14284 {
14285 loc_result = int_loc_descriptor (i);
14286 add_loc_descr (&loc_result,
14287 new_loc_descr (DW_OP_stack_value, 0, 0));
14288 return loc_result;
14289 }
14290
14291 loc_result = new_loc_descr (DW_OP_implicit_value,
14292 size, 0);
14293 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14294 loc_result->dw_loc_oprnd2.v.val_int = i;
14295 return loc_result;
14296 }
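/* For instance (illustrative), for SIZE == 4 and I == 5 on a target with
   DWARF2_ADDR_SIZE == 8: litsize is 1 (DW_OP_lit5), so the 2-byte
   "DW_OP_lit5 DW_OP_stack_value" form wins over the 6-byte
   "DW_OP_implicit_value 4 <4 data bytes>" form.  */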
14297
14298 /* Return a location descriptor that designates a base+offset location. */
14299
14300 static dw_loc_descr_ref
14301 based_loc_descr (rtx reg, poly_int64 offset,
14302 enum var_init_status initialized)
14303 {
14304 unsigned int regno;
14305 dw_loc_descr_ref result;
14306 dw_fde_ref fde = cfun->fde;
14307
14308 /* We only use "frame base" when we're sure we're talking about the
14309 post-prologue local stack frame. We do this by *not* running
14310 register elimination until this point, and recognizing the special
14311 argument pointer and soft frame pointer rtx's. */
14312 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14313 {
14314 rtx elim = (ira_use_lra_p
14315 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14316 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14317
14318 if (elim != reg)
14319 {
14320 elim = strip_offset_and_add (elim, &offset);
14321 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
14322 && (elim == hard_frame_pointer_rtx
14323 || elim == stack_pointer_rtx))
14324 || elim == (frame_pointer_needed
14325 ? hard_frame_pointer_rtx
14326 : stack_pointer_rtx));
14327
14328 /* If drap register is used to align stack, use frame
14329 pointer + offset to access stack variables. If stack
14330 is aligned without drap, use stack pointer + offset to
14331 access stack variables. */
14332 if (crtl->stack_realign_tried
14333 && reg == frame_pointer_rtx)
14334 {
14335 int base_reg
14336 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14337 ? HARD_FRAME_POINTER_REGNUM
14338 : REGNO (elim));
14339 return new_reg_loc_descr (base_reg, offset);
14340 }
14341
14342 gcc_assert (frame_pointer_fb_offset_valid);
14343 offset += frame_pointer_fb_offset;
14344 HOST_WIDE_INT const_offset;
14345 if (offset.is_constant (&const_offset))
14346 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14347 else
14348 {
14349 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14350 loc_descr_plus_const (&ret, offset);
14351 return ret;
14352 }
14353 }
14354 }
14355
14356 regno = REGNO (reg);
14357 #ifdef LEAF_REG_REMAP
14358 if (crtl->uses_only_leaf_regs)
14359 {
14360 int leaf_reg = LEAF_REG_REMAP (regno);
14361 if (leaf_reg != -1)
14362 regno = (unsigned) leaf_reg;
14363 }
14364 #endif
14365 regno = DWARF_FRAME_REGNUM (regno);
14366
14367 HOST_WIDE_INT const_offset;
14368 if (!optimize && fde
14369 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14370 && offset.is_constant (&const_offset))
14371 {
14372 /* Use cfa+offset to represent the location of arguments passed
14373 on the stack when drap is used to align stack.
14374 Only do this when not optimizing; for optimized code var-tracking
14375 is supposed to track where the arguments live, and the register
14376 used as vdrap or drap in some spot might be used for something
14377 else in other parts of the routine. */
14378 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14379 }
14380
14381 result = new_reg_loc_descr (regno, offset);
14382
14383 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14384 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14385
14386 return result;
14387 }
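/* Illustrative example: a local at frame_pointer + 8 with
   frame_pointer_fb_offset == -16 (and assuming register elimination itself
   adds no further displacement) ends up as DW_OP_fbreg -8.  */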
14388
14389 /* Return true if this RTL expression describes a base+offset calculation. */
14390
14391 static inline int
14392 is_based_loc (const_rtx rtl)
14393 {
14394 return (GET_CODE (rtl) == PLUS
14395 && ((REG_P (XEXP (rtl, 0))
14396 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14397 && CONST_INT_P (XEXP (rtl, 1)))));
14398 }
14399
14400 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14401 failed. */
14402
14403 static dw_loc_descr_ref
14404 tls_mem_loc_descriptor (rtx mem)
14405 {
14406 tree base;
14407 dw_loc_descr_ref loc_result;
14408
14409 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14410 return NULL;
14411
14412 base = get_base_address (MEM_EXPR (mem));
14413 if (base == NULL
14414 || !VAR_P (base)
14415 || !DECL_THREAD_LOCAL_P (base))
14416 return NULL;
14417
14418 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14419 if (loc_result == NULL)
14420 return NULL;
14421
14422 if (maybe_ne (MEM_OFFSET (mem), 0))
14423 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14424
14425 return loc_result;
14426 }
14427
14428 /* Output debug info about the reason why we failed to expand an expression
14429 as a DWARF expression. */
14430
14431 static void
14432 expansion_failed (tree expr, rtx rtl, char const *reason)
14433 {
14434 if (dump_file && (dump_flags & TDF_DETAILS))
14435 {
14436 fprintf (dump_file, "Failed to expand as dwarf: ");
14437 if (expr)
14438 print_generic_expr (dump_file, expr, dump_flags);
14439 if (rtl)
14440 {
14441 fprintf (dump_file, "\n");
14442 print_rtl (dump_file, rtl);
14443 }
14444 fprintf (dump_file, "\nReason: %s\n", reason);
14445 }
14446 }
14447
14448 /* Helper function for const_ok_for_output. */
14449
14450 static bool
14451 const_ok_for_output_1 (rtx rtl)
14452 {
14453 if (targetm.const_not_ok_for_debug_p (rtl))
14454 {
14455 if (GET_CODE (rtl) != UNSPEC)
14456 {
14457 expansion_failed (NULL_TREE, rtl,
14458 "Expression rejected for debug by the backend.\n");
14459 return false;
14460 }
14461
14462 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14463 the target hook doesn't explicitly allow it in debug info, assume
14464 we can't express it in the debug info. */
14465 /* Don't complain about TLS UNSPECs, those are just too hard to
14466 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14467 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14468 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14469 if (flag_checking
14470 && (XVECLEN (rtl, 0) == 0
14471 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14472 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14473 inform (current_function_decl
14474 ? DECL_SOURCE_LOCATION (current_function_decl)
14475 : UNKNOWN_LOCATION,
14476 #if NUM_UNSPEC_VALUES > 0
14477 "non-delegitimized UNSPEC %s (%d) found in variable location",
14478 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14479 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14480 XINT (rtl, 1));
14481 #else
14482 "non-delegitimized UNSPEC %d found in variable location",
14483 XINT (rtl, 1));
14484 #endif
14485 expansion_failed (NULL_TREE, rtl,
14486 "UNSPEC hasn't been delegitimized.\n");
14487 return false;
14488 }
14489
14490 if (CONST_POLY_INT_P (rtl))
14491 return false;
14492
14493 if (targetm.const_not_ok_for_debug_p (rtl))
14494 {
14495 expansion_failed (NULL_TREE, rtl,
14496 "Expression rejected for debug by the backend.\n");
14497 return false;
14498 }
14499
14500 /* FIXME: Refer to PR60655. It is possible for simplification
14501 of rtl expressions in var tracking to produce such expressions.
14502 We should really identify / validate expressions
14503 enclosed in CONST that can be handled by assemblers on various
14504 targets and only handle legitimate cases here. */
14505 switch (GET_CODE (rtl))
14506 {
14507 case SYMBOL_REF:
14508 break;
14509 case NOT:
14510 case NEG:
14511 return false;
14512 default:
14513 return true;
14514 }
14515
14516 if (CONSTANT_POOL_ADDRESS_P (rtl))
14517 {
14518 bool marked;
14519 get_pool_constant_mark (rtl, &marked);
14520 /* If all references to this pool constant were optimized away,
14521 it was not output and thus we can't represent it. */
14522 if (!marked)
14523 {
14524 expansion_failed (NULL_TREE, rtl,
14525 "Constant was removed from constant pool.\n");
14526 return false;
14527 }
14528 }
14529
14530 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14531 return false;
14532
14533 /* Avoid references to external symbols in debug info, on several targets
14534 the linker might even refuse to link when linking a shared library,
14535 and in many other cases the relocations for .debug_info/.debug_loc are
14536 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
14537 to be defined within the same shared library or executable, are fine. */
14538 if (SYMBOL_REF_EXTERNAL_P (rtl))
14539 {
14540 tree decl = SYMBOL_REF_DECL (rtl);
14541
14542 if (decl == NULL || !targetm.binds_local_p (decl))
14543 {
14544 expansion_failed (NULL_TREE, rtl,
14545 "Symbol not defined in current TU.\n");
14546 return false;
14547 }
14548 }
14549
14550 return true;
14551 }
14552
14553 /* Return true if constant RTL can be emitted in DW_OP_addr or
14554 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14555 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14556
14557 static bool
14558 const_ok_for_output (rtx rtl)
14559 {
14560 if (GET_CODE (rtl) == SYMBOL_REF)
14561 return const_ok_for_output_1 (rtl);
14562
14563 if (GET_CODE (rtl) == CONST)
14564 {
14565 subrtx_var_iterator::array_type array;
14566 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14567 if (!const_ok_for_output_1 (*iter))
14568 return false;
14569 return true;
14570 }
14571
14572 return true;
14573 }
14574
14575 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14576 if possible, NULL otherwise. */
14577
14578 static dw_die_ref
14579 base_type_for_mode (machine_mode mode, bool unsignedp)
14580 {
14581 dw_die_ref type_die;
14582 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14583
14584 if (type == NULL)
14585 return NULL;
14586 switch (TREE_CODE (type))
14587 {
14588 case INTEGER_TYPE:
14589 case REAL_TYPE:
14590 break;
14591 default:
14592 return NULL;
14593 }
14594 type_die = lookup_type_die (type);
14595 if (!type_die)
14596 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14597 comp_unit_die ());
14598 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14599 return NULL;
14600 return type_die;
14601 }
14602
14603 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14604 type matching MODE, or, if MODE is narrower than or as wide as
14605 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14606 possible. */
14607
14608 static dw_loc_descr_ref
14609 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14610 {
14611 machine_mode outer_mode = mode;
14612 dw_die_ref type_die;
14613 dw_loc_descr_ref cvt;
14614
14615 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14616 {
14617 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14618 return op;
14619 }
14620 type_die = base_type_for_mode (outer_mode, 1);
14621 if (type_die == NULL)
14622 return NULL;
14623 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14624 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14625 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14626 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14627 add_loc_descr (&op, cvt);
14628 return op;
14629 }
14630
14631 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14632
14633 static dw_loc_descr_ref
14634 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14635 dw_loc_descr_ref op1)
14636 {
14637 dw_loc_descr_ref ret = op0;
14638 add_loc_descr (&ret, op1);
14639 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14640 if (STORE_FLAG_VALUE != 1)
14641 {
14642 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14643 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14644 }
14645 return ret;
14646 }
14647
14648 /* Subroutine of scompare_loc_descriptor for the case in which we're
14649 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14650 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14651
14652 static dw_loc_descr_ref
14653 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14654 scalar_int_mode op_mode,
14655 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14656 {
14657 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14658 dw_loc_descr_ref cvt;
14659
14660 if (type_die == NULL)
14661 return NULL;
14662 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14663 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14664 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14665 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14666 add_loc_descr (&op0, cvt);
14667 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14668 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14669 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14670 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14671 add_loc_descr (&op1, cvt);
14672 return compare_loc_descriptor (op, op0, op1);
14673 }
14674
14675 /* Subroutine of scompare_loc_descriptor for the case in which we're
14676 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14677 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14678
14679 static dw_loc_descr_ref
14680 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14681 scalar_int_mode op_mode,
14682 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14683 {
14684 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14685 /* For eq/ne, if the operands are known to be zero-extended,
14686 there is no need to do the fancy shifting up. */
14687 if (op == DW_OP_eq || op == DW_OP_ne)
14688 {
14689 dw_loc_descr_ref last0, last1;
14690 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14691 ;
14692 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14693 ;
14694 /* deref_size zero extends, and for constants we can check
14695 whether they are zero extended or not. */
14696 if (((last0->dw_loc_opc == DW_OP_deref_size
14697 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14698 || (CONST_INT_P (XEXP (rtl, 0))
14699 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14700 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14701 && ((last1->dw_loc_opc == DW_OP_deref_size
14702 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14703 || (CONST_INT_P (XEXP (rtl, 1))
14704 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14705 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14706 return compare_loc_descriptor (op, op0, op1);
14707
14708 /* EQ/NE comparison against constant in narrower type than
14709 DWARF2_ADDR_SIZE can be performed either as
14710 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14711 DW_OP_{eq,ne}
14712 or
14713 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14714 DW_OP_{eq,ne}. Pick whatever is shorter. */
14715 if (CONST_INT_P (XEXP (rtl, 1))
14716 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14717 && (size_of_int_loc_descriptor (shift) + 1
14718 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14719 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14720 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14721 & GET_MODE_MASK (op_mode))))
14722 {
14723 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14724 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14725 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14726 & GET_MODE_MASK (op_mode));
14727 return compare_loc_descriptor (op, op0, op1);
14728 }
14729 }
14730 add_loc_descr (&op0, int_loc_descriptor (shift));
14731 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14732 if (CONST_INT_P (XEXP (rtl, 1)))
14733 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14734 else
14735 {
14736 add_loc_descr (&op1, int_loc_descriptor (shift));
14737 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14738 }
14739 return compare_loc_descriptor (op, op0, op1);
14740 }
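/* Illustrative example: comparing two HImode values with DWARF2_ADDR_SIZE == 4
   uses SHIFT == 16, so (for a non-constant, non-EQ/NE comparison) the result
   is "<op0> DW_OP_lit16 DW_OP_shl <op1> DW_OP_lit16 DW_OP_shl <op>", which
   lets the address-sized signed comparison see both operands with their sign
   bits in the right place.  */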
14741
14742 /* Return location descriptor for signed comparison OP RTL. */
14743
14744 static dw_loc_descr_ref
14745 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14746 machine_mode mem_mode)
14747 {
14748 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14749 dw_loc_descr_ref op0, op1;
14750
14751 if (op_mode == VOIDmode)
14752 op_mode = GET_MODE (XEXP (rtl, 1));
14753 if (op_mode == VOIDmode)
14754 return NULL;
14755
14756 scalar_int_mode int_op_mode;
14757 if (dwarf_strict
14758 && dwarf_version < 5
14759 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14760 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14761 return NULL;
14762
14763 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14764 VAR_INIT_STATUS_INITIALIZED);
14765 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14766 VAR_INIT_STATUS_INITIALIZED);
14767
14768 if (op0 == NULL || op1 == NULL)
14769 return NULL;
14770
14771 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14772 {
14773 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14774 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14775
14776 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14777 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14778 }
14779 return compare_loc_descriptor (op, op0, op1);
14780 }
14781
14782 /* Return location descriptor for unsigned comparison OP RTL. */
14783
14784 static dw_loc_descr_ref
14785 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14786 machine_mode mem_mode)
14787 {
14788 dw_loc_descr_ref op0, op1;
14789
14790 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14791 if (test_op_mode == VOIDmode)
14792 test_op_mode = GET_MODE (XEXP (rtl, 1));
14793
14794 scalar_int_mode op_mode;
14795 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14796 return NULL;
14797
14798 if (dwarf_strict
14799 && dwarf_version < 5
14800 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14801 return NULL;
14802
14803 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14804 VAR_INIT_STATUS_INITIALIZED);
14805 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14806 VAR_INIT_STATUS_INITIALIZED);
14807
14808 if (op0 == NULL || op1 == NULL)
14809 return NULL;
14810
14811 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14812 {
14813 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14814 dw_loc_descr_ref last0, last1;
14815 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14816 ;
14817 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14818 ;
14819 if (CONST_INT_P (XEXP (rtl, 0)))
14820 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14821 /* deref_size zero extends, so no need to mask it again. */
14822 else if (last0->dw_loc_opc != DW_OP_deref_size
14823 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14824 {
14825 add_loc_descr (&op0, int_loc_descriptor (mask));
14826 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14827 }
14828 if (CONST_INT_P (XEXP (rtl, 1)))
14829 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14830 /* deref_size zero extends, so no need to mask it again. */
14831 else if (last1->dw_loc_opc != DW_OP_deref_size
14832 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14833 {
14834 add_loc_descr (&op1, int_loc_descriptor (mask));
14835 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14836 }
14837 }
14838 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14839 {
14840 HOST_WIDE_INT bias = 1;
14841 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14842 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14843 if (CONST_INT_P (XEXP (rtl, 1)))
14844 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14845 + INTVAL (XEXP (rtl, 1)));
14846 else
14847 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14848 bias, 0));
14849 }
14850 return compare_loc_descriptor (op, op0, op1);
14851 }
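/* The DWARF2_ADDR_SIZE-sized case above is the usual bias trick, restated
   for illustration: adding 2**(N-1) modulo 2**N (N being DWARF2_ADDR_SIZE in
   bits) just flips the sign bit, so "a <u b" is equivalent to
   "(a + 2**(N-1)) <s (b + 2**(N-1))"; each operand gets the bias added
   (DW_OP_plus_uconst, or folded into a constant operand) before the signed
   comparison.  */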
14852
14853 /* Return location descriptor for {U,S}{MIN,MAX}. */
14854
14855 static dw_loc_descr_ref
14856 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14857 machine_mode mem_mode)
14858 {
14859 enum dwarf_location_atom op;
14860 dw_loc_descr_ref op0, op1, ret;
14861 dw_loc_descr_ref bra_node, drop_node;
14862
14863 scalar_int_mode int_mode;
14864 if (dwarf_strict
14865 && dwarf_version < 5
14866 && (!is_a <scalar_int_mode> (mode, &int_mode)
14867 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14868 return NULL;
14869
14870 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14871 VAR_INIT_STATUS_INITIALIZED);
14872 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14873 VAR_INIT_STATUS_INITIALIZED);
14874
14875 if (op0 == NULL || op1 == NULL)
14876 return NULL;
14877
14878 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14879 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14880 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14881 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14882 {
14883 /* Checked by the caller. */
14884 int_mode = as_a <scalar_int_mode> (mode);
14885 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14886 {
14887 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14888 add_loc_descr (&op0, int_loc_descriptor (mask));
14889 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14890 add_loc_descr (&op1, int_loc_descriptor (mask));
14891 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14892 }
14893 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14894 {
14895 HOST_WIDE_INT bias = 1;
14896 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14897 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14898 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14899 }
14900 }
14901 else if (is_a <scalar_int_mode> (mode, &int_mode)
14902 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14903 {
14904 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14905 add_loc_descr (&op0, int_loc_descriptor (shift));
14906 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14907 add_loc_descr (&op1, int_loc_descriptor (shift));
14908 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14909 }
14910 else if (is_a <scalar_int_mode> (mode, &int_mode)
14911 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14912 {
14913 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14914 dw_loc_descr_ref cvt;
14915 if (type_die == NULL)
14916 return NULL;
14917 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14918 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14919 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14920 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14921 add_loc_descr (&op0, cvt);
14922 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14923 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14924 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14925 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14926 add_loc_descr (&op1, cvt);
14927 }
14928
14929 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14930 op = DW_OP_lt;
14931 else
14932 op = DW_OP_gt;
14933 ret = op0;
14934 add_loc_descr (&ret, op1);
14935 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14936 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14937 add_loc_descr (&ret, bra_node);
14938 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14939 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14940 add_loc_descr (&ret, drop_node);
14941 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14942 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14943 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14944 && is_a <scalar_int_mode> (mode, &int_mode)
14945 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14946 ret = convert_descriptor_to_mode (int_mode, ret);
14947 return ret;
14948 }
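/* Illustrative expansion for SMIN with address-sized operands (no masking,
   biasing or conversion needed):
       <op0> DW_OP_dup <op1> DW_OP_swap DW_OP_over DW_OP_lt DW_OP_bra <L1>
       DW_OP_swap
   L1: DW_OP_drop
   which leaves op0 on the stack when op0 < op1, and op1 otherwise.  */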
14949
14950 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
14951 after converting both arguments to the type given by TYPE_DIE, then
14952 convert the result back to an unsigned (or untyped) value of MODE. */
14953
14954 static dw_loc_descr_ref
14955 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14956 scalar_int_mode mode, machine_mode mem_mode)
14957 {
14958 dw_loc_descr_ref cvt, op0, op1;
14959
14960 if (type_die == NULL)
14961 return NULL;
14962 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14963 VAR_INIT_STATUS_INITIALIZED);
14964 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14965 VAR_INIT_STATUS_INITIALIZED);
14966 if (op0 == NULL || op1 == NULL)
14967 return NULL;
14968 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14969 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14970 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14971 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14972 add_loc_descr (&op0, cvt);
14973 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14974 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14975 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14976 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14977 add_loc_descr (&op1, cvt);
14978 add_loc_descr (&op0, op1);
14979 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14980 return convert_descriptor_to_mode (mode, op0);
14981 }
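/* Schematically (illustration only), the result built here is:
       <op0> DW_OP_convert <TYPE_DIE> <op1> DW_OP_convert <TYPE_DIE> <OP>
   followed by the conversion back to MODE (typed, or untyped when MODE is
   no wider than DWARF2_ADDR_SIZE) performed by convert_descriptor_to_mode.  */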
14982
14983 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14984 const0 is DW_OP_lit0 or corresponding typed constant,
14985 const1 is DW_OP_lit1 or corresponding typed constant
14986 and constMSB is constant with just the MSB bit set
14987 for the mode):
14988 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14989 L1: const0 DW_OP_swap
14990 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14991 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14992 L3: DW_OP_drop
14993 L4: DW_OP_nop
14994
14995 CTZ is similar:
14996 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14997 L1: const0 DW_OP_swap
14998 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14999 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15000 L3: DW_OP_drop
15001 L4: DW_OP_nop
15002
15003 FFS is similar:
15004 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
15005 L1: const1 DW_OP_swap
15006 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15007 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15008 L3: DW_OP_drop
15009 L4: DW_OP_nop */
15010
15011 static dw_loc_descr_ref
15012 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15013 machine_mode mem_mode)
15014 {
15015 dw_loc_descr_ref op0, ret, tmp;
15016 HOST_WIDE_INT valv;
15017 dw_loc_descr_ref l1jump, l1label;
15018 dw_loc_descr_ref l2jump, l2label;
15019 dw_loc_descr_ref l3jump, l3label;
15020 dw_loc_descr_ref l4jump, l4label;
15021 rtx msb;
15022
15023 if (GET_MODE (XEXP (rtl, 0)) != mode)
15024 return NULL;
15025
15026 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15027 VAR_INIT_STATUS_INITIALIZED);
15028 if (op0 == NULL)
15029 return NULL;
15030 ret = op0;
15031 if (GET_CODE (rtl) == CLZ)
15032 {
15033 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15034 valv = GET_MODE_BITSIZE (mode);
15035 }
15036 else if (GET_CODE (rtl) == FFS)
15037 valv = 0;
15038 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15039 valv = GET_MODE_BITSIZE (mode);
15040 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15041 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15042 add_loc_descr (&ret, l1jump);
15043 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15044 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15045 VAR_INIT_STATUS_INITIALIZED);
15046 if (tmp == NULL)
15047 return NULL;
15048 add_loc_descr (&ret, tmp);
15049 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15050 add_loc_descr (&ret, l4jump);
15051 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15052 ? const1_rtx : const0_rtx,
15053 mode, mem_mode,
15054 VAR_INIT_STATUS_INITIALIZED);
15055 if (l1label == NULL)
15056 return NULL;
15057 add_loc_descr (&ret, l1label);
15058 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15059 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15060 add_loc_descr (&ret, l2label);
15061 if (GET_CODE (rtl) != CLZ)
15062 msb = const1_rtx;
15063 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15064 msb = GEN_INT (HOST_WIDE_INT_1U
15065 << (GET_MODE_BITSIZE (mode) - 1));
15066 else
15067 msb = immed_wide_int_const
15068 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15069 GET_MODE_PRECISION (mode)), mode);
15070 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15071 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15072 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15073 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15074 else
15075 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15076 VAR_INIT_STATUS_INITIALIZED);
15077 if (tmp == NULL)
15078 return NULL;
15079 add_loc_descr (&ret, tmp);
15080 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15081 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15082 add_loc_descr (&ret, l3jump);
15083 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15084 VAR_INIT_STATUS_INITIALIZED);
15085 if (tmp == NULL)
15086 return NULL;
15087 add_loc_descr (&ret, tmp);
15088 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15089 ? DW_OP_shl : DW_OP_shr, 0, 0));
15090 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15091 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15092 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15093 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15094 add_loc_descr (&ret, l2jump);
15095 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15096 add_loc_descr (&ret, l3label);
15097 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15098 add_loc_descr (&ret, l4label);
15099 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15100 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15101 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15102 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15103 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15104 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15105 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15106 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15107 return ret;
15108 }
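/* A worked example of the CLZ loop above (purely illustrative, 8-bit mode):
   for the value 0x10, constMSB is 0x80; the loop shifts left and bumps the
   counter three times (0x10 -> 0x20 -> 0x40 -> 0x80) before the DW_OP_and
   test fires, so the value left on the stack is 3 == CLZ (0x10).  */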
15109
15110 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15111 const1 is DW_OP_lit1 or corresponding typed constant):
15112 const0 DW_OP_swap
15113 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15114 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15115 L2: DW_OP_drop
15116
15117 PARITY is similar:
15118 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15119 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15120 L2: DW_OP_drop */
15121
15122 static dw_loc_descr_ref
15123 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15124 machine_mode mem_mode)
15125 {
15126 dw_loc_descr_ref op0, ret, tmp;
15127 dw_loc_descr_ref l1jump, l1label;
15128 dw_loc_descr_ref l2jump, l2label;
15129
15130 if (GET_MODE (XEXP (rtl, 0)) != mode)
15131 return NULL;
15132
15133 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15134 VAR_INIT_STATUS_INITIALIZED);
15135 if (op0 == NULL)
15136 return NULL;
15137 ret = op0;
15138 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15139 VAR_INIT_STATUS_INITIALIZED);
15140 if (tmp == NULL)
15141 return NULL;
15142 add_loc_descr (&ret, tmp);
15143 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15144 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15145 add_loc_descr (&ret, l1label);
15146 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15147 add_loc_descr (&ret, l2jump);
15148 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15149 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15150 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15151 VAR_INIT_STATUS_INITIALIZED);
15152 if (tmp == NULL)
15153 return NULL;
15154 add_loc_descr (&ret, tmp);
15155 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15156 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15157 ? DW_OP_plus : DW_OP_xor, 0, 0));
15158 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15159 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15160 VAR_INIT_STATUS_INITIALIZED);
15161 add_loc_descr (&ret, tmp);
15162 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15163 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15164 add_loc_descr (&ret, l1jump);
15165 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15166 add_loc_descr (&ret, l2label);
15167 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15168 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15169 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15170 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15171 return ret;
15172 }
15173
15174 /* BSWAP (constS is initial shift count, either 56 or 24):
15175 constS const0
15176 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15177 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15178 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15179 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15180 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15181
15182 static dw_loc_descr_ref
15183 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15184 machine_mode mem_mode)
15185 {
15186 dw_loc_descr_ref op0, ret, tmp;
15187 dw_loc_descr_ref l1jump, l1label;
15188 dw_loc_descr_ref l2jump, l2label;
15189
15190 if (BITS_PER_UNIT != 8
15191 || (GET_MODE_BITSIZE (mode) != 32
15192 && GET_MODE_BITSIZE (mode) != 64))
15193 return NULL;
15194
15195 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15196 VAR_INIT_STATUS_INITIALIZED);
15197 if (op0 == NULL)
15198 return NULL;
15199
15200 ret = op0;
15201 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15202 mode, mem_mode,
15203 VAR_INIT_STATUS_INITIALIZED);
15204 if (tmp == NULL)
15205 return NULL;
15206 add_loc_descr (&ret, tmp);
15207 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15208 VAR_INIT_STATUS_INITIALIZED);
15209 if (tmp == NULL)
15210 return NULL;
15211 add_loc_descr (&ret, tmp);
15212 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15213 add_loc_descr (&ret, l1label);
15214 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15215 mode, mem_mode,
15216 VAR_INIT_STATUS_INITIALIZED);
15217 add_loc_descr (&ret, tmp);
15218 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15219 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15220 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15221 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15222 VAR_INIT_STATUS_INITIALIZED);
15223 if (tmp == NULL)
15224 return NULL;
15225 add_loc_descr (&ret, tmp);
15226 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15227 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15228 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15229 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15230 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15231 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15232 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15233 VAR_INIT_STATUS_INITIALIZED);
15234 add_loc_descr (&ret, tmp);
15235 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15236 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15237 add_loc_descr (&ret, l2jump);
15238 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15239 VAR_INIT_STATUS_INITIALIZED);
15240 add_loc_descr (&ret, tmp);
15241 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15242 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15243 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15244 add_loc_descr (&ret, l1jump);
15245 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15246 add_loc_descr (&ret, l2label);
15247 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15248 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15249 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15250 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15251 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15252 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15253 return ret;
15254 }
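/* Illustratively, for SImode the initial shift count constS is 24 and the
   loop above runs four times (shift counts 24, 16, 8 and 0), so e.g.
   0x12345678 is rebuilt as 0x78563412.  */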
15255
15256 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15257 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15258 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15259 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15260
15261 ROTATERT is similar:
15262 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15263 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15264 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15265
15266 static dw_loc_descr_ref
15267 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15268 machine_mode mem_mode)
15269 {
15270 rtx rtlop1 = XEXP (rtl, 1);
15271 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15272 int i;
15273
15274 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15275 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15276 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15277 VAR_INIT_STATUS_INITIALIZED);
15278 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15279 VAR_INIT_STATUS_INITIALIZED);
15280 if (op0 == NULL || op1 == NULL)
15281 return NULL;
15282 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15283 for (i = 0; i < 2; i++)
15284 {
15285 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15286 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15287 mode, mem_mode,
15288 VAR_INIT_STATUS_INITIALIZED);
15289 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15290 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15291 ? DW_OP_const4u
15292 : HOST_BITS_PER_WIDE_INT == 64
15293 ? DW_OP_const8u : DW_OP_constu,
15294 GET_MODE_MASK (mode), 0);
15295 else
15296 mask[i] = NULL;
15297 if (mask[i] == NULL)
15298 return NULL;
15299 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15300 }
15301 ret = op0;
15302 add_loc_descr (&ret, op1);
15303 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15304 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15305 if (GET_CODE (rtl) == ROTATERT)
15306 {
15307 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15308 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15309 GET_MODE_BITSIZE (mode), 0));
15310 }
15311 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15312 if (mask[0] != NULL)
15313 add_loc_descr (&ret, mask[0]);
15314 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15315 if (mask[1] != NULL)
15316 {
15317 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15318 add_loc_descr (&ret, mask[1]);
15319 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15320 }
15321 if (GET_CODE (rtl) == ROTATE)
15322 {
15323 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15324 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15325 GET_MODE_BITSIZE (mode), 0));
15326 }
15327 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15328 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15329 return ret;
15330 }
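/* The DW_OP_neg / DW_OP_plus_uconst <BITSIZE> pair above computes the
   complementary shift count BITSIZE - n in address-sized modular arithmetic,
   e.g. 24 for BITSIZE == 32 and a rotate count of 8, so ROTATE expands to
   the usual (x << n) | (x >> (BITSIZE - n)) (illustrative restatement).  */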
15331
15332 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15333 for DEBUG_PARAMETER_REF RTL. */
15334
15335 static dw_loc_descr_ref
15336 parameter_ref_descriptor (rtx rtl)
15337 {
15338 dw_loc_descr_ref ret;
15339 dw_die_ref ref;
15340
15341 if (dwarf_strict)
15342 return NULL;
15343 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15344 /* With LTO during LTRANS we get the late DIE that refers to the early
15345 DIE, thus we add another indirection here. This seems to confuse
15346 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15347 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15348 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15349 if (ref)
15350 {
15351 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15352 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15353 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15354 }
15355 else
15356 {
15357 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15358 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15359 }
15360 return ret;
15361 }
15362
15363 /* The following routine converts the RTL for a variable or parameter
15364 (resident in memory) into an equivalent Dwarf representation of a
15365 mechanism for getting the address of that same variable onto the top of a
15366 hypothetical "address evaluation" stack.
15367
15368 When creating memory location descriptors, we are effectively transforming
15369 the RTL for a memory-resident object into its Dwarf postfix expression
15370 equivalent. This routine recursively descends an RTL tree, turning
15371 it into Dwarf postfix code as it goes.
15372
15373 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15374
15375 MEM_MODE is the mode of the memory reference, needed to handle some
15376 autoincrement addressing modes.
15377
15378 Return 0 if we can't represent the location. */
15379
15380 dw_loc_descr_ref
15381 mem_loc_descriptor (rtx rtl, machine_mode mode,
15382 machine_mode mem_mode,
15383 enum var_init_status initialized)
15384 {
15385 dw_loc_descr_ref mem_loc_result = NULL;
15386 enum dwarf_location_atom op;
15387 dw_loc_descr_ref op0, op1;
15388 rtx inner = NULL_RTX;
15389 poly_int64 offset;
15390
15391 if (mode == VOIDmode)
15392 mode = GET_MODE (rtl);
15393
15394 /* Note that for a dynamically sized array, the location we will generate a
15395 description of here will be the lowest numbered location which is
15396 actually within the array. That's *not* necessarily the same as the
15397 zeroth element of the array. */
15398
15399 rtl = targetm.delegitimize_address (rtl);
15400
15401 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15402 return NULL;
15403
15404 scalar_int_mode int_mode, inner_mode, op1_mode;
15405 switch (GET_CODE (rtl))
15406 {
15407 case POST_INC:
15408 case POST_DEC:
15409 case POST_MODIFY:
15410 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15411
15412 case SUBREG:
15413 /* The case of a subreg may arise when we have a local (register)
15414 variable or a formal (register) parameter which doesn't quite fill
15415 up an entire register. For now, just assume that it is
15416 legitimate to make the Dwarf info refer to the whole register which
15417 contains the given subreg. */
15418 if (!subreg_lowpart_p (rtl))
15419 break;
15420 inner = SUBREG_REG (rtl);
15421 /* FALLTHRU */
15422 case TRUNCATE:
15423 if (inner == NULL_RTX)
15424 inner = XEXP (rtl, 0);
15425 if (is_a <scalar_int_mode> (mode, &int_mode)
15426 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15427 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15428 #ifdef POINTERS_EXTEND_UNSIGNED
15429 || (int_mode == Pmode && mem_mode != VOIDmode)
15430 #endif
15431 )
15432 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15433 {
15434 mem_loc_result = mem_loc_descriptor (inner,
15435 inner_mode,
15436 mem_mode, initialized);
15437 break;
15438 }
15439 if (dwarf_strict && dwarf_version < 5)
15440 break;
15441 if (is_a <scalar_int_mode> (mode, &int_mode)
15442 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15443 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15444 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15445 {
15446 dw_die_ref type_die;
15447 dw_loc_descr_ref cvt;
15448
15449 mem_loc_result = mem_loc_descriptor (inner,
15450 GET_MODE (inner),
15451 mem_mode, initialized);
15452 if (mem_loc_result == NULL)
15453 break;
15454 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15455 if (type_die == NULL)
15456 {
15457 mem_loc_result = NULL;
15458 break;
15459 }
15460 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15461 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15462 else
15463 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15464 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15465 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15466 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15467 add_loc_descr (&mem_loc_result, cvt);
15468 if (is_a <scalar_int_mode> (mode, &int_mode)
15469 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15470 {
15471 /* Convert it to untyped afterwards. */
15472 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15473 add_loc_descr (&mem_loc_result, cvt);
15474 }
15475 }
15476 break;
15477
15478 case REG:
15479 if (!is_a <scalar_int_mode> (mode, &int_mode)
15480 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15481 && rtl != arg_pointer_rtx
15482 && rtl != frame_pointer_rtx
15483 #ifdef POINTERS_EXTEND_UNSIGNED
15484 && (int_mode != Pmode || mem_mode == VOIDmode)
15485 #endif
15486 ))
15487 {
15488 dw_die_ref type_die;
15489 unsigned int dbx_regnum;
15490
15491 if (dwarf_strict && dwarf_version < 5)
15492 break;
15493 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15494 break;
15495 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15496 if (type_die == NULL)
15497 break;
15498
15499 dbx_regnum = dbx_reg_number (rtl);
15500 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15501 break;
15502 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15503 dbx_regnum, 0);
15504 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15505 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15506 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15507 break;
15508 }
15509 /* Whenever a register number forms a part of the description of the
15510 method for calculating the (dynamic) address of a memory resident
15511 object, DWARF rules require the register number be referred to as
15512 a "base register". This distinction is not based in any way upon
15513 what category of register the hardware believes the given register
15514 belongs to. This is strictly DWARF terminology we're dealing with
15515 here. Note that in cases where the location of a memory-resident
15516 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15517 OP_CONST (0)) the actual DWARF location descriptor that we generate
15518 may just be OP_BASEREG (basereg). This may look deceptively like
15519 the object in question was allocated to a register (rather than in
15520 memory) so DWARF consumers need to be aware of the subtle
15521 distinction between OP_REG and OP_BASEREG. */
15522 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15523 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15524 else if (stack_realign_drap
15525 && crtl->drap_reg
15526 && crtl->args.internal_arg_pointer == rtl
15527 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15528 {
15529 /* If RTL is internal_arg_pointer, which has been optimized
15530 out, use DRAP instead. */
15531 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15532 VAR_INIT_STATUS_INITIALIZED);
15533 }
15534 break;
15535
15536 case SIGN_EXTEND:
15537 case ZERO_EXTEND:
15538 if (!is_a <scalar_int_mode> (mode, &int_mode)
15539 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15540 break;
15541 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15542 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15543 if (op0 == 0)
15544 break;
15545 else if (GET_CODE (rtl) == ZERO_EXTEND
15546 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15547 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15548 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15549 to expand zero extend as two shifts instead of
15550 masking. */
15551 && GET_MODE_SIZE (inner_mode) <= 4)
15552 {
15553 mem_loc_result = op0;
15554 add_loc_descr (&mem_loc_result,
15555 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15556 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15557 }
15558 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15559 {
15560 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15561 shift *= BITS_PER_UNIT;
15562 if (GET_CODE (rtl) == SIGN_EXTEND)
15563 op = DW_OP_shra;
15564 else
15565 op = DW_OP_shr;
15566 mem_loc_result = op0;
15567 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15568 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15569 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15570 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15571 }
15572 else if (!dwarf_strict || dwarf_version >= 5)
15573 {
15574 dw_die_ref type_die1, type_die2;
15575 dw_loc_descr_ref cvt;
15576
15577 type_die1 = base_type_for_mode (inner_mode,
15578 GET_CODE (rtl) == ZERO_EXTEND);
15579 if (type_die1 == NULL)
15580 break;
15581 type_die2 = base_type_for_mode (int_mode, 1);
15582 if (type_die2 == NULL)
15583 break;
15584 mem_loc_result = op0;
15585 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15586 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15587 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15588 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15589 add_loc_descr (&mem_loc_result, cvt);
15590 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15591 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15592 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15593 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15594 add_loc_descr (&mem_loc_result, cvt);
15595 }
15596 break;
15597
15598 case MEM:
15599 {
15600 rtx new_rtl = avoid_constant_pool_reference (rtl);
15601 if (new_rtl != rtl)
15602 {
15603 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15604 initialized);
15605 if (mem_loc_result != NULL)
15606 return mem_loc_result;
15607 }
15608 }
15609 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15610 get_address_mode (rtl), mode,
15611 VAR_INIT_STATUS_INITIALIZED);
15612 if (mem_loc_result == NULL)
15613 mem_loc_result = tls_mem_loc_descriptor (rtl);
15614 if (mem_loc_result != NULL)
15615 {
15616 if (!is_a <scalar_int_mode> (mode, &int_mode)
15617 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15618 {
15619 dw_die_ref type_die;
15620 dw_loc_descr_ref deref;
15621 HOST_WIDE_INT size;
15622
15623 if (dwarf_strict && dwarf_version < 5)
15624 return NULL;
15625 if (!GET_MODE_SIZE (mode).is_constant (&size))
15626 return NULL;
15627 type_die
15628 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15629 if (type_die == NULL)
15630 return NULL;
15631 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15632 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15633 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15634 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15635 add_loc_descr (&mem_loc_result, deref);
15636 }
15637 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15638 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15639 else
15640 add_loc_descr (&mem_loc_result,
15641 new_loc_descr (DW_OP_deref_size,
15642 GET_MODE_SIZE (int_mode), 0));
15643 }
15644 break;
15645
15646 case LO_SUM:
15647 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15648
15649 case LABEL_REF:
15650 /* Some ports can transform a symbol ref into a label ref, because
15651 the symbol ref is too far away and has to be dumped into a constant
15652 pool. */
15653 case CONST:
15654 case SYMBOL_REF:
15655 if (!is_a <scalar_int_mode> (mode, &int_mode)
15656 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15657 #ifdef POINTERS_EXTEND_UNSIGNED
15658 && (int_mode != Pmode || mem_mode == VOIDmode)
15659 #endif
15660 ))
15661 break;
15662 if (GET_CODE (rtl) == SYMBOL_REF
15663 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15664 {
15665 dw_loc_descr_ref temp;
15666
15667 /* If this is not defined, we have no way to emit the data. */
15668 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15669 break;
15670
15671 temp = new_addr_loc_descr (rtl, dtprel_true);
15672
15673 /* We check for DWARF 5 here because gdb did not implement
15674 DW_OP_form_tls_address until after 7.12. */
15675 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15676 ? DW_OP_form_tls_address
15677 : DW_OP_GNU_push_tls_address),
15678 0, 0);
15679 add_loc_descr (&mem_loc_result, temp);
15680
15681 break;
15682 }
15683
15684 if (!const_ok_for_output (rtl))
15685 {
15686 if (GET_CODE (rtl) == CONST)
15687 switch (GET_CODE (XEXP (rtl, 0)))
15688 {
15689 case NOT:
15690 op = DW_OP_not;
15691 goto try_const_unop;
15692 case NEG:
15693 op = DW_OP_neg;
15694 goto try_const_unop;
15695 try_const_unop:
15696 rtx arg;
15697 arg = XEXP (XEXP (rtl, 0), 0);
15698 if (!CONSTANT_P (arg))
15699 arg = gen_rtx_CONST (int_mode, arg);
15700 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15701 initialized);
15702 if (op0)
15703 {
15704 mem_loc_result = op0;
15705 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15706 }
15707 break;
15708 default:
15709 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15710 mem_mode, initialized);
15711 break;
15712 }
15713 break;
15714 }
15715
15716 symref:
15717 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15718 vec_safe_push (used_rtx_array, rtl);
15719 break;
15720
15721 case CONCAT:
15722 case CONCATN:
15723 case VAR_LOCATION:
15724 case DEBUG_IMPLICIT_PTR:
15725 expansion_failed (NULL_TREE, rtl,
15726 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15727 return 0;
15728
15729 case ENTRY_VALUE:
15730 if (dwarf_strict && dwarf_version < 5)
15731 return NULL;
15732 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15733 {
15734 if (!is_a <scalar_int_mode> (mode, &int_mode)
15735 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15736 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15737 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15738 else
15739 {
15740 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15741 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15742 return NULL;
15743 op0 = one_reg_loc_descriptor (dbx_regnum,
15744 VAR_INIT_STATUS_INITIALIZED);
15745 }
15746 }
15747 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15748 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15749 {
15750 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15751 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15752 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15753 return NULL;
15754 }
15755 else
15756 gcc_unreachable ();
15757 if (op0 == NULL)
15758 return NULL;
15759 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15760 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15761 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15762 break;
15763
15764 case DEBUG_PARAMETER_REF:
15765 mem_loc_result = parameter_ref_descriptor (rtl);
15766 break;
15767
15768 case PRE_MODIFY:
15769 /* Extract the PLUS expression nested inside and fall into
15770 PLUS code below. */
15771 rtl = XEXP (rtl, 1);
15772 goto plus;
15773
15774 case PRE_INC:
15775 case PRE_DEC:
15776 /* Turn these into a PLUS expression and fall into the PLUS code
15777 below. */
15778 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15779 gen_int_mode (GET_CODE (rtl) == PRE_INC
15780 ? GET_MODE_UNIT_SIZE (mem_mode)
15781 : -GET_MODE_UNIT_SIZE (mem_mode),
15782 mode));
15783
15784 /* fall through */
15785
15786 case PLUS:
15787 plus:
15788 if (is_based_loc (rtl)
15789 && is_a <scalar_int_mode> (mode, &int_mode)
15790 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15791 || XEXP (rtl, 0) == arg_pointer_rtx
15792 || XEXP (rtl, 0) == frame_pointer_rtx))
15793 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15794 INTVAL (XEXP (rtl, 1)),
15795 VAR_INIT_STATUS_INITIALIZED);
15796 else
15797 {
15798 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15799 VAR_INIT_STATUS_INITIALIZED);
15800 if (mem_loc_result == 0)
15801 break;
15802
15803 if (CONST_INT_P (XEXP (rtl, 1))
15804 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15805 <= DWARF2_ADDR_SIZE))
15806 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15807 else
15808 {
15809 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15810 VAR_INIT_STATUS_INITIALIZED);
15811 if (op1 == 0)
15812 return NULL;
15813 add_loc_descr (&mem_loc_result, op1);
15814 add_loc_descr (&mem_loc_result,
15815 new_loc_descr (DW_OP_plus, 0, 0));
15816 }
15817 }
15818 break;
15819
15820 /* If a pseudo-reg is optimized away, it is possible for it to
15821 be replaced with a MEM containing a multiply or shift. */
15822 case MINUS:
15823 op = DW_OP_minus;
15824 goto do_binop;
15825
15826 case MULT:
15827 op = DW_OP_mul;
15828 goto do_binop;
15829
15830 case DIV:
15831 if ((!dwarf_strict || dwarf_version >= 5)
15832 && is_a <scalar_int_mode> (mode, &int_mode)
15833 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15834 {
15835 mem_loc_result = typed_binop (DW_OP_div, rtl,
15836 base_type_for_mode (mode, 0),
15837 int_mode, mem_mode);
15838 break;
15839 }
15840 op = DW_OP_div;
15841 goto do_binop;
15842
15843 case UMOD:
15844 op = DW_OP_mod;
15845 goto do_binop;
15846
15847 case ASHIFT:
15848 op = DW_OP_shl;
15849 goto do_shift;
15850
15851 case ASHIFTRT:
15852 op = DW_OP_shra;
15853 goto do_shift;
15854
15855 case LSHIFTRT:
15856 op = DW_OP_shr;
15857 goto do_shift;
15858
15859 do_shift:
15860 if (!is_a <scalar_int_mode> (mode, &int_mode))
15861 break;
15862 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15863 VAR_INIT_STATUS_INITIALIZED);
15864 {
15865 rtx rtlop1 = XEXP (rtl, 1);
15866 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15867 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15868 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15869 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15870 VAR_INIT_STATUS_INITIALIZED);
15871 }
15872
15873 if (op0 == 0 || op1 == 0)
15874 break;
15875
15876 mem_loc_result = op0;
15877 add_loc_descr (&mem_loc_result, op1);
15878 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15879 break;
15880
15881 case AND:
15882 op = DW_OP_and;
15883 goto do_binop;
15884
15885 case IOR:
15886 op = DW_OP_or;
15887 goto do_binop;
15888
15889 case XOR:
15890 op = DW_OP_xor;
15891 goto do_binop;
15892
15893 do_binop:
15894 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15895 VAR_INIT_STATUS_INITIALIZED);
15896 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15897 VAR_INIT_STATUS_INITIALIZED);
15898
15899 if (op0 == 0 || op1 == 0)
15900 break;
15901
15902 mem_loc_result = op0;
15903 add_loc_descr (&mem_loc_result, op1);
15904 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15905 break;
15906
15907 case MOD:
15908 if ((!dwarf_strict || dwarf_version >= 5)
15909 && is_a <scalar_int_mode> (mode, &int_mode)
15910 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15911 {
15912 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15913 base_type_for_mode (mode, 0),
15914 int_mode, mem_mode);
15915 break;
15916 }
15917
15918 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15919 VAR_INIT_STATUS_INITIALIZED);
15920 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15921 VAR_INIT_STATUS_INITIALIZED);
15922
15923 if (op0 == 0 || op1 == 0)
15924 break;
15925
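/* Compute the modulus on the DWARF stack: with op0 and op1 pushed, the
   sequence DW_OP_over DW_OP_over DW_OP_div DW_OP_mul DW_OP_minus leaves
   op0 - (op0 / op1) * op1, i.e. the remainder, on top.  */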
15926 mem_loc_result = op0;
15927 add_loc_descr (&mem_loc_result, op1);
15928 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15929 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15930 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15931 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15932 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15933 break;
15934
15935 case UDIV:
15936 if ((!dwarf_strict || dwarf_version >= 5)
15937 && is_a <scalar_int_mode> (mode, &int_mode))
15938 {
15939 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15940 {
15941 op = DW_OP_div;
15942 goto do_binop;
15943 }
15944 mem_loc_result = typed_binop (DW_OP_div, rtl,
15945 base_type_for_mode (int_mode, 1),
15946 int_mode, mem_mode);
15947 }
15948 break;
15949
15950 case NOT:
15951 op = DW_OP_not;
15952 goto do_unop;
15953
15954 case ABS:
15955 op = DW_OP_abs;
15956 goto do_unop;
15957
15958 case NEG:
15959 op = DW_OP_neg;
15960 goto do_unop;
15961
15962 do_unop:
15963 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15964 VAR_INIT_STATUS_INITIALIZED);
15965
15966 if (op0 == 0)
15967 break;
15968
15969 mem_loc_result = op0;
15970 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15971 break;
15972
15973 case CONST_INT:
15974 if (!is_a <scalar_int_mode> (mode, &int_mode)
15975 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15976 #ifdef POINTERS_EXTEND_UNSIGNED
15977 || (int_mode == Pmode
15978 && mem_mode != VOIDmode
15979 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15980 #endif
15981 )
15982 {
15983 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15984 break;
15985 }
15986 if ((!dwarf_strict || dwarf_version >= 5)
15987 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15988 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15989 {
15990 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15991 scalar_int_mode amode;
15992 if (type_die == NULL)
15993 return NULL;
15994 if (INTVAL (rtl) >= 0
15995 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15996 .exists (&amode))
15997 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15998 /* const DW_OP_convert <XXX> vs.
15999 DW_OP_const_type <XXX, 1, const>. */
16000 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
16001 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
16002 {
16003 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16004 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16005 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16006 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16007 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
16008 add_loc_descr (&mem_loc_result, op0);
16009 return mem_loc_result;
16010 }
16011 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16012 INTVAL (rtl));
16013 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16014 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16015 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16016 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16017 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16018 else
16019 {
16020 mem_loc_result->dw_loc_oprnd2.val_class
16021 = dw_val_class_const_double;
16022 mem_loc_result->dw_loc_oprnd2.v.val_double
16023 = double_int::from_shwi (INTVAL (rtl));
16024 }
16025 }
16026 break;
16027
16028 case CONST_DOUBLE:
16029 if (!dwarf_strict || dwarf_version >= 5)
16030 {
16031 dw_die_ref type_die;
16032
16033 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16034 CONST_DOUBLE rtx could represent either a large integer
16035 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16036 the value is always a floating point constant.
16037
16038 When it is an integer, a CONST_DOUBLE is used whenever
16039 the constant requires 2 HWIs to be adequately represented.
16040 We output CONST_DOUBLEs as blocks. */
16041 if (mode == VOIDmode
16042 || (GET_MODE (rtl) == VOIDmode
16043 && maybe_ne (GET_MODE_BITSIZE (mode),
16044 HOST_BITS_PER_DOUBLE_INT)))
16045 break;
16046 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16047 if (type_die == NULL)
16048 return NULL;
16049 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16050 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16051 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16052 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16053 #if TARGET_SUPPORTS_WIDE_INT == 0
16054 if (!SCALAR_FLOAT_MODE_P (mode))
16055 {
16056 mem_loc_result->dw_loc_oprnd2.val_class
16057 = dw_val_class_const_double;
16058 mem_loc_result->dw_loc_oprnd2.v.val_double
16059 = rtx_to_double_int (rtl);
16060 }
16061 else
16062 #endif
16063 {
16064 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16065 unsigned int length = GET_MODE_SIZE (float_mode);
16066 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16067
16068 insert_float (rtl, array);
16069 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16070 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16071 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16072 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16073 }
16074 }
16075 break;
16076
16077 case CONST_WIDE_INT:
16078 if (!dwarf_strict || dwarf_version >= 5)
16079 {
16080 dw_die_ref type_die;
16081
16082 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16083 if (type_die == NULL)
16084 return NULL;
16085 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16086 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16087 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16088 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16089 mem_loc_result->dw_loc_oprnd2.val_class
16090 = dw_val_class_wide_int;
16091 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16092 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16093 }
16094 break;
16095
16096 case CONST_POLY_INT:
16097 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16098 break;
16099
16100 case EQ:
16101 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16102 break;
16103
16104 case GE:
16105 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16106 break;
16107
16108 case GT:
16109 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16110 break;
16111
16112 case LE:
16113 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16114 break;
16115
16116 case LT:
16117 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16118 break;
16119
16120 case NE:
16121 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16122 break;
16123
16124 case GEU:
16125 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16126 break;
16127
16128 case GTU:
16129 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16130 break;
16131
16132 case LEU:
16133 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16134 break;
16135
16136 case LTU:
16137 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16138 break;
16139
16140 case UMIN:
16141 case UMAX:
16142 if (!SCALAR_INT_MODE_P (mode))
16143 break;
16144 /* FALLTHRU */
16145 case SMIN:
16146 case SMAX:
16147 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16148 break;
16149
16150 case ZERO_EXTRACT:
16151 case SIGN_EXTRACT:
16152 if (CONST_INT_P (XEXP (rtl, 1))
16153 && CONST_INT_P (XEXP (rtl, 2))
16154 && is_a <scalar_int_mode> (mode, &int_mode)
16155 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16156 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16157 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16158 && ((unsigned) INTVAL (XEXP (rtl, 1))
16159 + (unsigned) INTVAL (XEXP (rtl, 2))
16160 <= GET_MODE_BITSIZE (int_mode)))
16161 {
16162 int shift, size;
16163 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16164 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16165 if (op0 == 0)
16166 break;
16167 if (GET_CODE (rtl) == SIGN_EXTRACT)
16168 op = DW_OP_shra;
16169 else
16170 op = DW_OP_shr;
16171 mem_loc_result = op0;
16172 size = INTVAL (XEXP (rtl, 1));
16173 shift = INTVAL (XEXP (rtl, 2));
16174 if (BITS_BIG_ENDIAN)
16175 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16176 if (shift + size != (int) DWARF2_ADDR_SIZE * BITS_PER_UNIT)
16177 {
16178 add_loc_descr (&mem_loc_result,
16179 int_loc_descriptor (DWARF2_ADDR_SIZE * BITS_PER_UNIT
16180 - shift - size));
16181 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16182 }
16183 if (size != (int) DWARF2_ADDR_SIZE * BITS_PER_UNIT)
16184 {
16185 add_loc_descr (&mem_loc_result,
16186 int_loc_descriptor (DWARF2_ADDR_SIZE * BITS_PER_UNIT - size));
16187 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16188 }
16189 }
16190 break;
16191
16192 case IF_THEN_ELSE:
16193 {
16194 dw_loc_descr_ref op2, bra_node, drop_node;
16195 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16196 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16197 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16198 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16199 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16200 VAR_INIT_STATUS_INITIALIZED);
16201 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16202 VAR_INIT_STATUS_INITIALIZED);
16203 if (op0 == NULL || op1 == NULL || op2 == NULL)
16204 break;
16205
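/* Push op1, then op2, then the condition.  DW_OP_bra pops the condition
   and, if it is nonzero, jumps to the DW_OP_drop, which discards op2 and
   leaves op1; otherwise the DW_OP_swap followed by DW_OP_drop discards op1
   and leaves op2.  */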
16206 mem_loc_result = op1;
16207 add_loc_descr (&mem_loc_result, op2);
16208 add_loc_descr (&mem_loc_result, op0);
16209 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16210 add_loc_descr (&mem_loc_result, bra_node);
16211 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16212 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16213 add_loc_descr (&mem_loc_result, drop_node);
16214 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16215 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16216 }
16217 break;
16218
16219 case FLOAT_EXTEND:
16220 case FLOAT_TRUNCATE:
16221 case FLOAT:
16222 case UNSIGNED_FLOAT:
16223 case FIX:
16224 case UNSIGNED_FIX:
16225 if (!dwarf_strict || dwarf_version >= 5)
16226 {
16227 dw_die_ref type_die;
16228 dw_loc_descr_ref cvt;
16229
16230 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16231 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16232 if (op0 == NULL)
16233 break;
16234 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16235 && (GET_CODE (rtl) == FLOAT
16236 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16237 {
16238 type_die = base_type_for_mode (int_mode,
16239 GET_CODE (rtl) == UNSIGNED_FLOAT);
16240 if (type_die == NULL)
16241 break;
16242 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16243 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16244 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16245 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16246 add_loc_descr (&op0, cvt);
16247 }
16248 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16249 if (type_die == NULL)
16250 break;
16251 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16252 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16253 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16254 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16255 add_loc_descr (&op0, cvt);
16256 if (is_a <scalar_int_mode> (mode, &int_mode)
16257 && (GET_CODE (rtl) == FIX
16258 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16259 {
16260 op0 = convert_descriptor_to_mode (int_mode, op0);
16261 if (op0 == NULL)
16262 break;
16263 }
16264 mem_loc_result = op0;
16265 }
16266 break;
16267
16268 case CLZ:
16269 case CTZ:
16270 case FFS:
16271 if (is_a <scalar_int_mode> (mode, &int_mode))
16272 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16273 break;
16274
16275 case POPCOUNT:
16276 case PARITY:
16277 if (is_a <scalar_int_mode> (mode, &int_mode))
16278 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16279 break;
16280
16281 case BSWAP:
16282 if (is_a <scalar_int_mode> (mode, &int_mode))
16283 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16284 break;
16285
16286 case ROTATE:
16287 case ROTATERT:
16288 if (is_a <scalar_int_mode> (mode, &int_mode))
16289 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16290 break;
16291
16292 case COMPARE:
16293 /* In theory, we could implement the above. */
16294 /* DWARF cannot represent the unsigned compare operations
16295 natively. */
16296 case SS_MULT:
16297 case US_MULT:
16298 case SS_DIV:
16299 case US_DIV:
16300 case SS_PLUS:
16301 case US_PLUS:
16302 case SS_MINUS:
16303 case US_MINUS:
16304 case SS_NEG:
16305 case US_NEG:
16306 case SS_ABS:
16307 case SS_ASHIFT:
16308 case US_ASHIFT:
16309 case SS_TRUNCATE:
16310 case US_TRUNCATE:
16311 case UNORDERED:
16312 case ORDERED:
16313 case UNEQ:
16314 case UNGE:
16315 case UNGT:
16316 case UNLE:
16317 case UNLT:
16318 case LTGT:
16319 case FRACT_CONVERT:
16320 case UNSIGNED_FRACT_CONVERT:
16321 case SAT_FRACT:
16322 case UNSIGNED_SAT_FRACT:
16323 case SQRT:
16324 case ASM_OPERANDS:
16325 case VEC_MERGE:
16326 case VEC_SELECT:
16327 case VEC_CONCAT:
16328 case VEC_DUPLICATE:
16329 case VEC_SERIES:
16330 case UNSPEC:
16331 case HIGH:
16332 case FMA:
16333 case STRICT_LOW_PART:
16334 case CONST_VECTOR:
16335 case CONST_FIXED:
16336 case CLRSB:
16337 case CLOBBER:
16338 case CLOBBER_HIGH:
16339 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16340 can't express it in the debug info. This can happen e.g. with some
16341 TLS UNSPECs. */
16342 break;
16343
16344 case CONST_STRING:
16345 resolve_one_addr (&rtl);
16346 goto symref;
16347
16348 /* RTL sequences inside a PARALLEL record a series of DWARF operations for
16349 the expression. An UNSPEC rtx represents a raw DWARF operation;
16350 new_loc_descr is called for it to build the operation directly.
16351 Otherwise mem_loc_descriptor is called recursively. */
16352 case PARALLEL:
16353 {
16354 int index = 0;
16355 dw_loc_descr_ref exp_result = NULL;
16356
16357 for (; index < XVECLEN (rtl, 0); index++)
16358 {
16359 rtx elem = XVECEXP (rtl, 0, index);
16360 if (GET_CODE (elem) == UNSPEC)
16361 {
16362 /* Each DWARF operation UNSPEC contains two operands; if
16363 one operand is not used for the operation, const0_rtx is
16364 passed. */
16365 gcc_assert (XVECLEN (elem, 0) == 2);
16366
16367 HOST_WIDE_INT dw_op = XINT (elem, 1);
16368 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16369 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16370 exp_result
16371 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16372 oprnd2);
16373 }
16374 else
16375 exp_result
16376 = mem_loc_descriptor (elem, mode, mem_mode,
16377 VAR_INIT_STATUS_INITIALIZED);
16378
16379 if (!mem_loc_result)
16380 mem_loc_result = exp_result;
16381 else
16382 add_loc_descr (&mem_loc_result, exp_result);
16383 }
16384
16385 break;
16386 }
16387
16388 default:
16389 if (flag_checking)
16390 {
16391 print_rtl (stderr, rtl);
16392 gcc_unreachable ();
16393 }
16394 break;
16395 }
16396
16397 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16398 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16399
16400 return mem_loc_result;
16401 }
16402
16403 /* Return a descriptor that describes the concatenation of two locations.
16404 This is typically a complex variable. */
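/* For a complex double, for instance, this typically yields something like
   <loc of real part> DW_OP_piece 8 <loc of imaginary part> DW_OP_piece 8
   (assuming 8-byte parts); the piece sizes come from the modes of X0 and
   X1.  */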
16405
16406 static dw_loc_descr_ref
16407 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16408 {
16409 /* At present we only track constant-sized pieces. */
16410 unsigned int size0, size1;
16411 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16412 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16413 return 0;
16414
16415 dw_loc_descr_ref cc_loc_result = NULL;
16416 dw_loc_descr_ref x0_ref
16417 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16418 dw_loc_descr_ref x1_ref
16419 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16420
16421 if (x0_ref == 0 || x1_ref == 0)
16422 return 0;
16423
16424 cc_loc_result = x0_ref;
16425 add_loc_descr_op_piece (&cc_loc_result, size0);
16426
16427 add_loc_descr (&cc_loc_result, x1_ref);
16428 add_loc_descr_op_piece (&cc_loc_result, size1);
16429
16430 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16431 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16432
16433 return cc_loc_result;
16434 }
16435
16436 /* Return a descriptor that describes the concatenation of N
16437 locations. */
16438
16439 static dw_loc_descr_ref
16440 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16441 {
16442 unsigned int i;
16443 dw_loc_descr_ref cc_loc_result = NULL;
16444 unsigned int n = XVECLEN (concatn, 0);
16445 unsigned int size;
16446
16447 for (i = 0; i < n; ++i)
16448 {
16449 dw_loc_descr_ref ref;
16450 rtx x = XVECEXP (concatn, 0, i);
16451
16452 /* At present we only track constant-sized pieces. */
16453 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16454 return NULL;
16455
16456 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16457 if (ref == NULL)
16458 return NULL;
16459
16460 add_loc_descr (&cc_loc_result, ref);
16461 add_loc_descr_op_piece (&cc_loc_result, size);
16462 }
16463
16464 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16465 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16466
16467 return cc_loc_result;
16468 }
16469
16470 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16471 for DEBUG_IMPLICIT_PTR RTL. */
16472
16473 static dw_loc_descr_ref
16474 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16475 {
16476 dw_loc_descr_ref ret;
16477 dw_die_ref ref;
16478
16479 if (dwarf_strict && dwarf_version < 5)
16480 return NULL;
16481 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16482 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16483 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16484 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16485 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16486 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16487 if (ref)
16488 {
16489 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16490 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16491 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16492 }
16493 else
16494 {
16495 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16496 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16497 }
16498 return ret;
16499 }
16500
16501 /* Output a proper Dwarf location descriptor for a variable or parameter
16502 which is either allocated in a register or in a memory location. For a
16503 register, we just generate an OP_REG and the register number. For a
16504 memory location we provide a Dwarf postfix expression describing how to
16505 generate the (dynamic) address of the object onto the address stack.
16506
16507 MODE is the mode of the decl if this loc_descriptor is going to be used in
16508 the .debug_loc section, where DW_OP_stack_value and DW_OP_implicit_value are
16509 allowed, and VOIDmode otherwise.
16510
16511 If we don't know how to describe it, return 0. */
16512
16513 static dw_loc_descr_ref
16514 loc_descriptor (rtx rtl, machine_mode mode,
16515 enum var_init_status initialized)
16516 {
16517 dw_loc_descr_ref loc_result = NULL;
16518 scalar_int_mode int_mode;
16519
16520 switch (GET_CODE (rtl))
16521 {
16522 case SUBREG:
16523 /* The case of a subreg may arise when we have a local (register)
16524 variable or a formal (register) parameter which doesn't quite fill
16525 up an entire register. For now, just assume that it is
16526 legitimate to make the Dwarf info refer to the whole register which
16527 contains the given subreg. */
16528 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16529 loc_result = loc_descriptor (SUBREG_REG (rtl),
16530 GET_MODE (SUBREG_REG (rtl)), initialized);
16531 else
16532 goto do_default;
16533 break;
16534
16535 case REG:
16536 loc_result = reg_loc_descriptor (rtl, initialized);
16537 break;
16538
16539 case MEM:
16540 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16541 GET_MODE (rtl), initialized);
16542 if (loc_result == NULL)
16543 loc_result = tls_mem_loc_descriptor (rtl);
16544 if (loc_result == NULL)
16545 {
16546 rtx new_rtl = avoid_constant_pool_reference (rtl);
16547 if (new_rtl != rtl)
16548 loc_result = loc_descriptor (new_rtl, mode, initialized);
16549 }
16550 break;
16551
16552 case CONCAT:
16553 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16554 initialized);
16555 break;
16556
16557 case CONCATN:
16558 loc_result = concatn_loc_descriptor (rtl, initialized);
16559 break;
16560
16561 case VAR_LOCATION:
16562 /* Single part. */
16563 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16564 {
16565 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16566 if (GET_CODE (loc) == EXPR_LIST)
16567 loc = XEXP (loc, 0);
16568 loc_result = loc_descriptor (loc, mode, initialized);
16569 break;
16570 }
16571
16572 rtl = XEXP (rtl, 1);
16573 /* FALLTHRU */
16574
16575 case PARALLEL:
16576 {
16577 rtvec par_elems = XVEC (rtl, 0);
16578 int num_elem = GET_NUM_ELEM (par_elems);
16579 machine_mode mode;
16580 int i, size;
16581
16582 /* Create the first one, so we have something to add to. */
16583 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16584 VOIDmode, initialized);
16585 if (loc_result == NULL)
16586 return NULL;
16587 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16588 /* At present we only track constant-sized pieces. */
16589 if (!GET_MODE_SIZE (mode).is_constant (&size))
16590 return NULL;
16591 add_loc_descr_op_piece (&loc_result, size);
16592 for (i = 1; i < num_elem; i++)
16593 {
16594 dw_loc_descr_ref temp;
16595
16596 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16597 VOIDmode, initialized);
16598 if (temp == NULL)
16599 return NULL;
16600 add_loc_descr (&loc_result, temp);
16601 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16602 /* At present we only track constant-sized pieces. */
16603 if (!GET_MODE_SIZE (mode).is_constant (&size))
16604 return NULL;
16605 add_loc_descr_op_piece (&loc_result, size);
16606 }
16607 }
16608 break;
16609
16610 case CONST_INT:
16611 if (mode != VOIDmode && mode != BLKmode)
16612 {
16613 int_mode = as_a <scalar_int_mode> (mode);
16614 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16615 INTVAL (rtl));
16616 }
16617 break;
16618
16619 case CONST_DOUBLE:
16620 if (mode == VOIDmode)
16621 mode = GET_MODE (rtl);
16622
16623 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16624 {
16625 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16626
16627 /* Note that a CONST_DOUBLE rtx could represent either an integer
16628 or a floating-point constant. A CONST_DOUBLE is used whenever
16629 the constant requires more than one word in order to be
16630 adequately represented. We output CONST_DOUBLEs as blocks. */
16631 scalar_mode smode = as_a <scalar_mode> (mode);
16632 loc_result = new_loc_descr (DW_OP_implicit_value,
16633 GET_MODE_SIZE (smode), 0);
16634 #if TARGET_SUPPORTS_WIDE_INT == 0
16635 if (!SCALAR_FLOAT_MODE_P (smode))
16636 {
16637 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16638 loc_result->dw_loc_oprnd2.v.val_double
16639 = rtx_to_double_int (rtl);
16640 }
16641 else
16642 #endif
16643 {
16644 unsigned int length = GET_MODE_SIZE (smode);
16645 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16646
16647 insert_float (rtl, array);
16648 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16649 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16650 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16651 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16652 }
16653 }
16654 break;
16655
16656 case CONST_WIDE_INT:
16657 if (mode == VOIDmode)
16658 mode = GET_MODE (rtl);
16659
16660 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16661 {
16662 int_mode = as_a <scalar_int_mode> (mode);
16663 loc_result = new_loc_descr (DW_OP_implicit_value,
16664 GET_MODE_SIZE (int_mode), 0);
16665 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16666 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16667 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16668 }
16669 break;
16670
16671 case CONST_VECTOR:
16672 if (mode == VOIDmode)
16673 mode = GET_MODE (rtl);
16674
16675 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16676 {
16677 unsigned int length;
16678 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16679 return NULL;
16680
16681 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16682 unsigned char *array
16683 = ggc_vec_alloc<unsigned char> (length * elt_size);
16684 unsigned int i;
16685 unsigned char *p;
16686 machine_mode imode = GET_MODE_INNER (mode);
16687
16688 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16689 switch (GET_MODE_CLASS (mode))
16690 {
16691 case MODE_VECTOR_INT:
16692 for (i = 0, p = array; i < length; i++, p += elt_size)
16693 {
16694 rtx elt = CONST_VECTOR_ELT (rtl, i);
16695 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16696 }
16697 break;
16698
16699 case MODE_VECTOR_FLOAT:
16700 for (i = 0, p = array; i < length; i++, p += elt_size)
16701 {
16702 rtx elt = CONST_VECTOR_ELT (rtl, i);
16703 insert_float (elt, p);
16704 }
16705 break;
16706
16707 default:
16708 gcc_unreachable ();
16709 }
16710
16711 loc_result = new_loc_descr (DW_OP_implicit_value,
16712 length * elt_size, 0);
16713 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16714 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16715 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16716 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16717 }
16718 break;
16719
16720 case CONST:
16721 if (mode == VOIDmode
16722 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16723 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16724 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16725 {
16726 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16727 break;
16728 }
16729 /* FALLTHROUGH */
16730 case SYMBOL_REF:
16731 if (!const_ok_for_output (rtl))
16732 break;
16733 /* FALLTHROUGH */
16734 case LABEL_REF:
16735 if (is_a <scalar_int_mode> (mode, &int_mode)
16736 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16737 && (dwarf_version >= 4 || !dwarf_strict))
16738 {
16739 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16740 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16741 vec_safe_push (used_rtx_array, rtl);
16742 }
16743 break;
16744
16745 case DEBUG_IMPLICIT_PTR:
16746 loc_result = implicit_ptr_descriptor (rtl, 0);
16747 break;
16748
16749 case PLUS:
16750 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16751 && CONST_INT_P (XEXP (rtl, 1)))
16752 {
16753 loc_result
16754 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16755 break;
16756 }
16757 /* FALLTHRU */
16758 do_default:
16759 default:
16760 if ((is_a <scalar_int_mode> (mode, &int_mode)
16761 && GET_MODE (rtl) == int_mode
16762 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16763 && dwarf_version >= 4)
16764 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16765 {
16766 /* Value expression. */
16767 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16768 if (loc_result)
16769 add_loc_descr (&loc_result,
16770 new_loc_descr (DW_OP_stack_value, 0, 0));
16771 }
16772 break;
16773 }
16774
16775 return loc_result;
16776 }
16777
16778 /* We need to figure out what section we should use as the base for the
16779 address ranges where a given location is valid.
16780 1. If this particular DECL has a section associated with it, use that.
16781 2. If this function has a section associated with it, use that.
16782 3. Otherwise, use the text section.
16783 XXX: If you split a variable across multiple sections, we won't notice. */
16784
16785 static const char *
16786 secname_for_decl (const_tree decl)
16787 {
16788 const char *secname;
16789
16790 if (VAR_OR_FUNCTION_DECL_P (decl)
16791 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16792 && DECL_SECTION_NAME (decl))
16793 secname = DECL_SECTION_NAME (decl);
16794 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16795 secname = DECL_SECTION_NAME (current_function_decl);
16796 else if (cfun && in_cold_section_p)
16797 secname = crtl->subsections.cold_section_label;
16798 else
16799 secname = text_section_label;
16800
16801 return secname;
16802 }
16803
16804 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16805
16806 static bool
16807 decl_by_reference_p (tree decl)
16808 {
16809 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16810 || VAR_P (decl))
16811 && DECL_BY_REFERENCE (decl));
16812 }
16813
16814 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16815 for VARLOC. */
16816
16817 static dw_loc_descr_ref
16818 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16819 enum var_init_status initialized)
16820 {
16821 int have_address = 0;
16822 dw_loc_descr_ref descr;
16823 machine_mode mode;
16824
16825 if (want_address != 2)
16826 {
16827 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16828 /* Single part. */
16829 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16830 {
16831 varloc = PAT_VAR_LOCATION_LOC (varloc);
16832 if (GET_CODE (varloc) == EXPR_LIST)
16833 varloc = XEXP (varloc, 0);
16834 mode = GET_MODE (varloc);
16835 if (MEM_P (varloc))
16836 {
16837 rtx addr = XEXP (varloc, 0);
16838 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16839 mode, initialized);
16840 if (descr)
16841 have_address = 1;
16842 else
16843 {
16844 rtx x = avoid_constant_pool_reference (varloc);
16845 if (x != varloc)
16846 descr = mem_loc_descriptor (x, mode, VOIDmode,
16847 initialized);
16848 }
16849 }
16850 else
16851 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16852 }
16853 else
16854 return 0;
16855 }
16856 else
16857 {
16858 if (GET_CODE (varloc) == VAR_LOCATION)
16859 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16860 else
16861 mode = DECL_MODE (loc);
16862 descr = loc_descriptor (varloc, mode, initialized);
16863 have_address = 1;
16864 }
16865
16866 if (!descr)
16867 return 0;
16868
16869 if (want_address == 2 && !have_address
16870 && (dwarf_version >= 4 || !dwarf_strict))
16871 {
16872 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16873 {
16874 expansion_failed (loc, NULL_RTX,
16875 "DWARF address size mismatch");
16876 return 0;
16877 }
16878 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16879 have_address = 1;
16880 }
16881 /* Show if we can't fill the request for an address. */
16882 if (want_address && !have_address)
16883 {
16884 expansion_failed (loc, NULL_RTX,
16885 "Want address and only have value");
16886 return 0;
16887 }
16888
16889 /* If we've got an address and don't want one, dereference. */
16890 if (!want_address && have_address)
16891 {
16892 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16893 enum dwarf_location_atom op;
16894
16895 if (size > DWARF2_ADDR_SIZE || size == -1)
16896 {
16897 expansion_failed (loc, NULL_RTX,
16898 "DWARF address size mismatch");
16899 return 0;
16900 }
16901 else if (size == DWARF2_ADDR_SIZE)
16902 op = DW_OP_deref;
16903 else
16904 op = DW_OP_deref_size;
16905
16906 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16907 }
16908
16909 return descr;
16910 }
16911
16912 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16913 if it is not possible. */
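/* For example, a whole 4-byte piece at bit offset 0 becomes DW_OP_piece 4,
   while a 12-bit piece at bit offset 4 needs DW_OP_bit_piece 12, 4, which is
   only emitted for DWARF 3 and later or when not in strict mode.  */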
16914
16915 static dw_loc_descr_ref
16916 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16917 {
16918 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16919 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16920 else if (dwarf_version >= 3 || !dwarf_strict)
16921 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16922 else
16923 return NULL;
16924 }
16925
16926 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16927 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
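/* LOC is a chain of pieces (see decl_piece_bitsize): a piece that has a
   location note is translated with dw_loc_list_1 and closed with a
   DW_OP_piece / DW_OP_bit_piece of the piece's size, while a piece without
   one is a hole, emitted as a piece with an empty location expression to
   mark those bits as optimized out.  */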
16928
16929 static dw_loc_descr_ref
16930 dw_sra_loc_expr (tree decl, rtx loc)
16931 {
16932 rtx p;
16933 unsigned HOST_WIDE_INT padsize = 0;
16934 dw_loc_descr_ref descr, *descr_tail;
16935 unsigned HOST_WIDE_INT decl_size;
16936 rtx varloc;
16937 enum var_init_status initialized;
16938
16939 if (DECL_SIZE (decl) == NULL
16940 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16941 return NULL;
16942
16943 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16944 descr = NULL;
16945 descr_tail = &descr;
16946
16947 for (p = loc; p; p = XEXP (p, 1))
16948 {
16949 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16950 rtx loc_note = *decl_piece_varloc_ptr (p);
16951 dw_loc_descr_ref cur_descr;
16952 dw_loc_descr_ref *tail, last = NULL;
16953 unsigned HOST_WIDE_INT opsize = 0;
16954
16955 if (loc_note == NULL_RTX
16956 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16957 {
16958 padsize += bitsize;
16959 continue;
16960 }
16961 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16962 varloc = NOTE_VAR_LOCATION (loc_note);
16963 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16964 if (cur_descr == NULL)
16965 {
16966 padsize += bitsize;
16967 continue;
16968 }
16969
16970 /* Check that cur_descr either doesn't use
16971 DW_OP_*piece operations, or their sum is equal
16972 to bitsize. Otherwise we can't embed it. */
16973 for (tail = &cur_descr; *tail != NULL;
16974 tail = &(*tail)->dw_loc_next)
16975 if ((*tail)->dw_loc_opc == DW_OP_piece)
16976 {
16977 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16978 * BITS_PER_UNIT;
16979 last = *tail;
16980 }
16981 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16982 {
16983 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16984 last = *tail;
16985 }
16986
16987 if (last != NULL && opsize != bitsize)
16988 {
16989 padsize += bitsize;
16990 /* Discard the current piece of the descriptor and release any
16991 addr_table entries it uses. */
16992 remove_loc_list_addr_table_entries (cur_descr);
16993 continue;
16994 }
16995
16996 /* If there is a hole, add DW_OP_*piece after empty DWARF
16997 expression, which means that those bits are optimized out. */
16998 if (padsize)
16999 {
17000 if (padsize > decl_size)
17001 {
17002 remove_loc_list_addr_table_entries (cur_descr);
17003 goto discard_descr;
17004 }
17005 decl_size -= padsize;
17006 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
17007 if (*descr_tail == NULL)
17008 {
17009 remove_loc_list_addr_table_entries (cur_descr);
17010 goto discard_descr;
17011 }
17012 descr_tail = &(*descr_tail)->dw_loc_next;
17013 padsize = 0;
17014 }
17015 *descr_tail = cur_descr;
17016 descr_tail = tail;
17017 if (bitsize > decl_size)
17018 goto discard_descr;
17019 decl_size -= bitsize;
17020 if (last == NULL)
17021 {
17022 HOST_WIDE_INT offset = 0;
17023 if (GET_CODE (varloc) == VAR_LOCATION
17024 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17025 {
17026 varloc = PAT_VAR_LOCATION_LOC (varloc);
17027 if (GET_CODE (varloc) == EXPR_LIST)
17028 varloc = XEXP (varloc, 0);
17029 }
17030 do
17031 {
17032 if (GET_CODE (varloc) == CONST
17033 || GET_CODE (varloc) == SIGN_EXTEND
17034 || GET_CODE (varloc) == ZERO_EXTEND)
17035 varloc = XEXP (varloc, 0);
17036 else if (GET_CODE (varloc) == SUBREG)
17037 varloc = SUBREG_REG (varloc);
17038 else
17039 break;
17040 }
17041 while (1);
17042 /* The DW_OP_bit_piece offset should be zero for register
17043 or implicit location descriptions and empty location
17044 descriptions, but for memory addresses it needs big-endian
17045 adjustment. */
17046 if (MEM_P (varloc))
17047 {
17048 unsigned HOST_WIDE_INT memsize;
17049 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17050 goto discard_descr;
17051 memsize *= BITS_PER_UNIT;
17052 if (memsize != bitsize)
17053 {
17054 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17055 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17056 goto discard_descr;
17057 if (memsize < bitsize)
17058 goto discard_descr;
17059 if (BITS_BIG_ENDIAN)
17060 offset = memsize - bitsize;
17061 }
17062 }
17063
17064 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17065 if (*descr_tail == NULL)
17066 goto discard_descr;
17067 descr_tail = &(*descr_tail)->dw_loc_next;
17068 }
17069 }
17070
17071 /* If there were any non-empty expressions, add padding until the end of
17072 the decl. */
17073 if (descr != NULL && decl_size != 0)
17074 {
17075 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17076 if (*descr_tail == NULL)
17077 goto discard_descr;
17078 }
17079 return descr;
17080
17081 discard_descr:
17082 /* Discard the descriptor and release any addr_table entries it uses. */
17083 remove_loc_list_addr_table_entries (descr);
17084 return NULL;
17085 }
17086
17087 /* Return the dwarf representation of the location list LOC_LIST of
17088 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
17089 function. */
17090
17091 static dw_loc_list_ref
17092 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17093 {
17094 const char *endname, *secname;
17095 var_loc_view endview;
17096 rtx varloc;
17097 enum var_init_status initialized;
17098 struct var_loc_node *node;
17099 dw_loc_descr_ref descr;
17100 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17101 dw_loc_list_ref list = NULL;
17102 dw_loc_list_ref *listp = &list;
17103
17104 /* Now that we know what section we are using for a base,
17105 actually construct the list of locations.
17106 The first location information is what is passed to the
17107 function that creates the location list, and the remaining
17108 locations just get added on to that list.
17109 Note that we only know the start address for a location
17110 (i.e. where the location changes), so to build the range, we use
17111 the range [current location start, next location start].
17112 This means we have to special-case the last node, and generate
17113 a range of [last location start, end of function label]. */
17114
17115 if (cfun && crtl->has_bb_partition)
17116 {
17117 bool save_in_cold_section_p = in_cold_section_p;
17118 in_cold_section_p = first_function_block_is_cold;
17119 if (loc_list->last_before_switch == NULL)
17120 in_cold_section_p = !in_cold_section_p;
17121 secname = secname_for_decl (decl);
17122 in_cold_section_p = save_in_cold_section_p;
17123 }
17124 else
17125 secname = secname_for_decl (decl);
17126
17127 for (node = loc_list->first; node; node = node->next)
17128 {
17129 bool range_across_switch = false;
17130 if (GET_CODE (node->loc) == EXPR_LIST
17131 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17132 {
17133 if (GET_CODE (node->loc) == EXPR_LIST)
17134 {
17135 descr = NULL;
17136 /* This requires DW_OP_{,bit_}piece, which is not usable
17137 inside DWARF expressions. */
17138 if (want_address == 2)
17139 descr = dw_sra_loc_expr (decl, node->loc);
17140 }
17141 else
17142 {
17143 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17144 varloc = NOTE_VAR_LOCATION (node->loc);
17145 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17146 }
17147 if (descr)
17148 {
17149 /* If a section switch happens in between node->label
17150 and node->next->label (or the end of the function) and
17151 we can't emit it as a single-entry list,
17152 emit two ranges: the first one ending at the end
17153 of the first partition and the second one starting at the
17154 beginning of the second partition. */
17155 if (node == loc_list->last_before_switch
17156 && (node != loc_list->first || loc_list->first->next
17157 /* If we are to emit a view number, we will emit
17158 a loclist rather than a single location
17159 expression for the entire function (see
17160 loc_list_has_views), so we have to split the
17161 range that straddles across partitions. */
17162 || !ZERO_VIEW_P (node->view))
17163 && current_function_decl)
17164 {
17165 endname = cfun->fde->dw_fde_end;
17166 endview = 0;
17167 range_across_switch = true;
17168 }
17169 /* The variable has a location between NODE->LABEL and
17170 NODE->NEXT->LABEL. */
17171 else if (node->next)
17172 endname = node->next->label, endview = node->next->view;
17173 /* If the variable has a location at the last label
17174 it keeps its location until the end of function. */
17175 else if (!current_function_decl)
17176 endname = text_end_label, endview = 0;
17177 else
17178 {
17179 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17180 current_function_funcdef_no);
17181 endname = ggc_strdup (label_id);
17182 endview = 0;
17183 }
17184
17185 *listp = new_loc_list (descr, node->label, node->view,
17186 endname, endview, secname);
17187 if (TREE_CODE (decl) == PARM_DECL
17188 && node == loc_list->first
17189 && NOTE_P (node->loc)
17190 && strcmp (node->label, endname) == 0)
17191 (*listp)->force = true;
17192 listp = &(*listp)->dw_loc_next;
17193 }
17194 }
17195
17196 if (cfun
17197 && crtl->has_bb_partition
17198 && node == loc_list->last_before_switch)
17199 {
17200 bool save_in_cold_section_p = in_cold_section_p;
17201 in_cold_section_p = !first_function_block_is_cold;
17202 secname = secname_for_decl (decl);
17203 in_cold_section_p = save_in_cold_section_p;
17204 }
17205
17206 if (range_across_switch)
17207 {
17208 if (GET_CODE (node->loc) == EXPR_LIST)
17209 descr = dw_sra_loc_expr (decl, node->loc);
17210 else
17211 {
17212 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17213 varloc = NOTE_VAR_LOCATION (node->loc);
17214 descr = dw_loc_list_1 (decl, varloc, want_address,
17215 initialized);
17216 }
17217 gcc_assert (descr);
17218 /* The variable has a location between NODE->LABEL and
17219 NODE->NEXT->LABEL. */
17220 if (node->next)
17221 endname = node->next->label, endview = node->next->view;
17222 else
17223 endname = cfun->fde->dw_fde_second_end, endview = 0;
17224 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17225 endname, endview, secname);
17226 listp = &(*listp)->dw_loc_next;
17227 }
17228 }
17229
17230 /* Try to avoid the overhead of a location list by emitting a location
17231 expression instead, but only if we didn't have more than one
17232 location entry in the first place. If some entries were not
17233 representable, we don't want to pretend that the one entry that was
17234 representable applies to the entire scope in which the variable is
17235 available. */
17236 if (list && loc_list->first->next)
17237 gen_llsym (list);
17238 else
17239 maybe_gen_llsym (list);
17240
17241 return list;
17242 }
17243
17244 /* Return true if the loc_list has only a single element and thus can be
17245 represented as a location description. */
17246
17247 static bool
17248 single_element_loc_list_p (dw_loc_list_ref list)
17249 {
17250 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17251 return !list->ll_symbol;
17252 }
17253
17254 /* Duplicate a single element of location list. */
17255
17256 static inline dw_loc_descr_ref
17257 copy_loc_descr (dw_loc_descr_ref ref)
17258 {
17259 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17260 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17261 return copy;
17262 }
17263
17264 /* To each location in list LIST append loc descr REF. */
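/* The first location gets REF itself; every following location gets a deep
   copy, since each expression owns its own singly-linked descriptor
   chain.  */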
17265
17266 static void
17267 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17268 {
17269 dw_loc_descr_ref copy;
17270 add_loc_descr (&list->expr, ref);
17271 list = list->dw_loc_next;
17272 while (list)
17273 {
17274 copy = copy_loc_descr (ref);
17275 add_loc_descr (&list->expr, copy);
17276 while (copy->dw_loc_next)
17277 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17278 list = list->dw_loc_next;
17279 }
17280 }
17281
17282 /* To each location in list LIST prepend loc descr REF. */
17283
17284 static void
17285 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17286 {
17287 dw_loc_descr_ref copy;
17288 dw_loc_descr_ref ref_end = list->expr;
17289 add_loc_descr (&ref, list->expr);
17290 list->expr = ref;
17291 list = list->dw_loc_next;
17292 while (list)
17293 {
17294 dw_loc_descr_ref end = list->expr;
17295 list->expr = copy = copy_loc_descr (ref);
17296 while (copy->dw_loc_next != ref_end)
17297 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17298 copy->dw_loc_next = end;
17299 list = list->dw_loc_next;
17300 }
17301 }
17302
17303 /* Given two lists RET and LIST,
17304 produce the location list that results from adding the expression in LIST
17305 to the expression in RET at each position in the program.
17306 Might be destructive on both RET and LIST.
17307
17308 TODO: We handle only the simple cases of RET or LIST having at most one
17309 element. The general case would involve sorting the lists in program order
17310 and merging them, which will need some additional work.
17311 Adding that will improve the quality of debug info, especially for SRA-ed
17312 structures. */
17313
17314 static void
17315 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17316 {
17317 if (!list)
17318 return;
17319 if (!*ret)
17320 {
17321 *ret = list;
17322 return;
17323 }
17324 if (!list->dw_loc_next)
17325 {
17326 add_loc_descr_to_each (*ret, list->expr);
17327 return;
17328 }
17329 if (!(*ret)->dw_loc_next)
17330 {
17331 prepend_loc_descr_to_each (list, (*ret)->expr);
17332 *ret = list;
17333 return;
17334 }
17335 expansion_failed (NULL_TREE, NULL_RTX,
17336 "Don't know how to merge two non-trivial"
17337 " location lists.\n");
17338 *ret = NULL;
17339 return;
17340 }
17341
17342 /* LOC is a constant expression. Try our luck: look it up in the constant
17343 pool and return the loc_descr of its address. */
17344
17345 static dw_loc_descr_ref
17346 cst_pool_loc_descr (tree loc)
17347 {
17348 /* Get an RTL for this, if something has been emitted. */
17349 rtx rtl = lookup_constant_def (loc);
17350
17351 if (!rtl || !MEM_P (rtl))
17352 {
17353 gcc_assert (!rtl);
17354 return 0;
17355 }
17356 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17357
17358 /* TODO: We might get more coverage if we were actually delaying expansion
17359 of all expressions until the end of compilation, when constant pools are
17360 fully populated. */
17361 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17362 {
17363 expansion_failed (loc, NULL_RTX,
17364 "CST value in contant pool but not marked.");
17365 return 0;
17366 }
17367 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17368 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17369 }
17370
17371 /* Return a dw_loc_list representing the address of the addr_expr LOC
17372 by looking for an inner INDIRECT_REF expression and turning
17373 it into simple arithmetic.
17374
17375 See loc_list_from_tree for the meaning of CONTEXT. */
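/* For example, for &(*p).f with a nonzero field offset, this produces the
   value of P, the offset of F added with DW_OP_plus_uconst (or computed and
   added with DW_OP_plus when the offset is variable), and a closing
   DW_OP_stack_value.  */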
17376
17377 static dw_loc_list_ref
17378 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17379 loc_descr_context *context)
17380 {
17381 tree obj, offset;
17382 poly_int64 bitsize, bitpos, bytepos;
17383 machine_mode mode;
17384 int unsignedp, reversep, volatilep = 0;
17385 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17386
17387 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17388 &bitsize, &bitpos, &offset, &mode,
17389 &unsignedp, &reversep, &volatilep);
17390 STRIP_NOPS (obj);
17391 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17392 {
17393 expansion_failed (loc, NULL_RTX, "bitfield access");
17394 return 0;
17395 }
17396 if (!INDIRECT_REF_P (obj))
17397 {
17398 expansion_failed (obj,
17399 NULL_RTX, "no indirect ref in inner reference");
17400 return 0;
17401 }
17402 if (!offset && known_eq (bitpos, 0))
17403 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17404 context);
17405 else if (toplev
17406 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17407 && (dwarf_version >= 4 || !dwarf_strict))
17408 {
17409 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17410 if (!list_ret)
17411 return 0;
17412 if (offset)
17413 {
17414 /* Variable offset. */
17415 list_ret1 = loc_list_from_tree (offset, 0, context);
17416 if (list_ret1 == 0)
17417 return 0;
17418 add_loc_list (&list_ret, list_ret1);
17419 if (!list_ret)
17420 return 0;
17421 add_loc_descr_to_each (list_ret,
17422 new_loc_descr (DW_OP_plus, 0, 0));
17423 }
17424 HOST_WIDE_INT value;
17425 if (bytepos.is_constant (&value) && value > 0)
17426 add_loc_descr_to_each (list_ret,
17427 new_loc_descr (DW_OP_plus_uconst, value, 0));
17428 else if (maybe_ne (bytepos, 0))
17429 loc_list_plus_const (list_ret, bytepos);
17430 add_loc_descr_to_each (list_ret,
17431 new_loc_descr (DW_OP_stack_value, 0, 0));
17432 }
17433 return list_ret;
17434 }
17435
17436 /* Set LOC to the next operation that is not a DW_OP_nop operation. In case
17437 all operations from LOC are nops, move to the last one. Insert into NOPS all
17438 operations that are skipped. */
17439
17440 static void
17441 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17442 hash_set<dw_loc_descr_ref> &nops)
17443 {
17444 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17445 {
17446 nops.add (loc);
17447 loc = loc->dw_loc_next;
17448 }
17449 }
17450
17451 /* Helper for loc_descr_without_nops: free the location description operation
17452 P. */
17453
17454 bool
17455 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17456 {
17457 ggc_free (loc);
17458 return true;
17459 }
17460
17461 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17462 finishes LOC. */
17463
17464 static void
17465 loc_descr_without_nops (dw_loc_descr_ref &loc)
17466 {
17467 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17468 return;
17469
17470 /* Set of all DW_OP_nop operations we remove. */
17471 hash_set<dw_loc_descr_ref> nops;
17472
17473 /* First, strip all prefix NOP operations in order to keep the head of the
17474 operations list. */
17475 loc_descr_to_next_no_nop (loc, nops);
17476
17477 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17478 {
17479 /* For control flow operations: strip "prefix" nops in destination
17480 labels. */
17481 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17482 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17483 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17484 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17485
17486 /* Do the same for the operations that follow, then move to the next
17487 iteration. */
17488 if (cur->dw_loc_next != NULL)
17489 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17490 cur = cur->dw_loc_next;
17491 }
17492
17493 nops.traverse<void *, free_loc_descr> (NULL);
17494 }
17495
17496
17497 struct dwarf_procedure_info;
17498
17499 /* Helper structure for location description generation. */
17500 struct loc_descr_context
17501 {
17502 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17503 NULL_TREE if DW_OP_push_object_address is invalid for this location
17504 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17505 tree context_type;
17506 /* The ..._DECL node that should be translated as a
17507 DW_OP_push_object_address operation. */
17508 tree base_decl;
17509 /* Information about the DWARF procedure we are currently generating. NULL if
17510 we are not generating a DWARF procedure. */
17511 struct dwarf_procedure_info *dpi;
17512 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17513 by the consumer. Used for DW_TAG_generic_subrange attributes. */
17514 bool placeholder_arg;
17515 /* True if PLACEHOLDER_EXPR has been seen. */
17516 bool placeholder_seen;
17517 };
17518
17519 /* DWARF procedures generation
17520
17521 DWARF expressions (aka. location descriptions) are used to encode variable
17522 quantities such as sizes or offsets. Such computations can have redundant parts
17523 that can be factorized in order to reduce the size of the output debug
17524 information. This is the whole point of DWARF procedures.
17525
17526 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17527 already factorized into functions ("size functions") in order to handle very
17528 big and complex types. Such functions are quite simple: they have integral
17529 arguments, they return an integral result and their body contains only a
17530 return statement with arithmetic expressions. This is the only kind of
17531 function we are interested in translating into DWARF procedures, here.
17532
17533 DWARF expressions and DWARF procedures are executed using a stack, so we have
17534 to define some calling convention for them to interact. Let's say that:
17535
17536 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17537 all arguments in reverse order (right-to-left) so that when the DWARF
17538 procedure execution starts, the first argument is the top of the stack.
17539
17540 - Then, when returning, the DWARF procedure must have consumed all arguments
17541 on the stack, must have pushed the result and touched nothing else.
17542
17543 - Each argument and the result have integral types, so each can be held in a
17544 single stack slot.
17545
17546 - We call "frame offset" the number of stack slots that are "under DWARF
17547 procedure control": it includes the argument slots, the temporaries and
17548 the result slot. Thus, it is equal to the number of arguments when the
17549 procedure execution starts and must be equal to one (the result) when it
17550 returns. */
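/* Purely illustrative example (not generated verbatim): to call a
   two-argument size function F (A, B) under this convention, a DWARF
   expression pushes B, then A (so that A, the first argument, ends up on
   top of the stack), then executes DW_OP_call4 on F's
   DW_TAG_dwarf_procedure DIE. Inside the procedure the frame offset
   starts at 2; after the body and the epilogue emitted by
   function_to_dwarf_procedure below, exactly one slot, the result,
   remains. */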
17551
17552 /* Helper structure used when generating operations for a DWARF procedure. */
17553 struct dwarf_procedure_info
17554 {
17555 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17556 currently translated. */
17557 tree fndecl;
17558 /* The number of arguments FNDECL takes. */
17559 unsigned args_count;
17560 };
17561
17562 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17563 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17564 equate it to this DIE. */
17565
17566 static dw_die_ref
17567 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17568 dw_die_ref parent_die)
17569 {
17570 dw_die_ref dwarf_proc_die;
17571
17572 if ((dwarf_version < 3 && dwarf_strict)
17573 || location == NULL)
17574 return NULL;
17575
17576 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17577 if (fndecl)
17578 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17579 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17580 return dwarf_proc_die;
17581 }
17582
17583 /* Return whether TYPE is a supported type as a DWARF procedure argument
17584 type or return type (we handle only integral, offset and pointer types
17585 that aren't wider than the DWARF expression evaluation stack). */
17586
17587 static bool
17588 is_handled_procedure_type (tree type)
17589 {
17590 return ((INTEGRAL_TYPE_P (type)
17591 || TREE_CODE (type) == OFFSET_TYPE
17592 || TREE_CODE (type) == POINTER_TYPE)
17593 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17594 }
17595
17596 /* Helper for resolve_args_picking: do the same but stop when coming across
17597 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17598 offset *before* evaluating the corresponding operation. */
17599
17600 static bool
17601 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17602 struct dwarf_procedure_info *dpi,
17603 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17604 {
17605 /* The "frame_offset" identifier is already used to name a macro... */
17606 unsigned frame_offset_ = initial_frame_offset;
17607 dw_loc_descr_ref l;
17608
17609 for (l = loc; l != NULL;)
17610 {
17611 bool existed;
17612 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17613
17614 /* If we have already visited this node, there is nothing more to compute. */
17615 if (existed)
17616 {
17617 /* Make sure that the stack size is consistent wherever the execution
17618 flow comes from. */
17619 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17620 break;
17621 }
17622 l_frame_offset = frame_offset_;
17623
17624 /* If needed, relocate the picking offset with respect to the frame
17625 offset. */
17626 if (l->frame_offset_rel)
17627 {
17628 unsigned HOST_WIDE_INT off;
17629 switch (l->dw_loc_opc)
17630 {
17631 case DW_OP_pick:
17632 off = l->dw_loc_oprnd1.v.val_unsigned;
17633 break;
17634 case DW_OP_dup:
17635 off = 0;
17636 break;
17637 case DW_OP_over:
17638 off = 1;
17639 break;
17640 default:
17641 gcc_unreachable ();
17642 }
17643 /* frame_offset_ is the size of the current stack frame, including
17644 incoming arguments. Besides, the arguments are pushed
17645 right-to-left. Thus, in order to access the Nth argument from
17646 this operation node, the picking has to skip temporaries *plus*
17647 one stack slot per argument (0 for the first one, 1 for the second
17648 one, etc.).
17649
17650 The targeted argument number (N) is already set as the operand,
17651 and the number of temporaries can be computed with:
17652 frame_offset_ - dpi->args_count */
17653 off += frame_offset_ - dpi->args_count;
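/* Illustrative worked example: in a procedure with 2 arguments, if the
   frame offset at this operation is 5, then 5 - 2 = 3 temporaries sit on
   top of the arguments, so accessing argument N requires skipping N + 3
   slots: argument 0 becomes DW_OP_pick 3 and argument 1 becomes
   DW_OP_pick 4. */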
17654
17655 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17656 if (off > 255)
17657 return false;
17658
17659 if (off == 0)
17660 {
17661 l->dw_loc_opc = DW_OP_dup;
17662 l->dw_loc_oprnd1.v.val_unsigned = 0;
17663 }
17664 else if (off == 1)
17665 {
17666 l->dw_loc_opc = DW_OP_over;
17667 l->dw_loc_oprnd1.v.val_unsigned = 0;
17668 }
17669 else
17670 {
17671 l->dw_loc_opc = DW_OP_pick;
17672 l->dw_loc_oprnd1.v.val_unsigned = off;
17673 }
17674 }
17675
17676 /* Update frame_offset according to the effect the current operation has
17677 on the stack. */
17678 switch (l->dw_loc_opc)
17679 {
17680 case DW_OP_deref:
17681 case DW_OP_swap:
17682 case DW_OP_rot:
17683 case DW_OP_abs:
17684 case DW_OP_neg:
17685 case DW_OP_not:
17686 case DW_OP_plus_uconst:
17687 case DW_OP_skip:
17688 case DW_OP_reg0:
17689 case DW_OP_reg1:
17690 case DW_OP_reg2:
17691 case DW_OP_reg3:
17692 case DW_OP_reg4:
17693 case DW_OP_reg5:
17694 case DW_OP_reg6:
17695 case DW_OP_reg7:
17696 case DW_OP_reg8:
17697 case DW_OP_reg9:
17698 case DW_OP_reg10:
17699 case DW_OP_reg11:
17700 case DW_OP_reg12:
17701 case DW_OP_reg13:
17702 case DW_OP_reg14:
17703 case DW_OP_reg15:
17704 case DW_OP_reg16:
17705 case DW_OP_reg17:
17706 case DW_OP_reg18:
17707 case DW_OP_reg19:
17708 case DW_OP_reg20:
17709 case DW_OP_reg21:
17710 case DW_OP_reg22:
17711 case DW_OP_reg23:
17712 case DW_OP_reg24:
17713 case DW_OP_reg25:
17714 case DW_OP_reg26:
17715 case DW_OP_reg27:
17716 case DW_OP_reg28:
17717 case DW_OP_reg29:
17718 case DW_OP_reg30:
17719 case DW_OP_reg31:
17720 case DW_OP_bregx:
17721 case DW_OP_piece:
17722 case DW_OP_deref_size:
17723 case DW_OP_nop:
17724 case DW_OP_bit_piece:
17725 case DW_OP_implicit_value:
17726 case DW_OP_stack_value:
17727 break;
17728
17729 case DW_OP_addr:
17730 case DW_OP_const1u:
17731 case DW_OP_const1s:
17732 case DW_OP_const2u:
17733 case DW_OP_const2s:
17734 case DW_OP_const4u:
17735 case DW_OP_const4s:
17736 case DW_OP_const8u:
17737 case DW_OP_const8s:
17738 case DW_OP_constu:
17739 case DW_OP_consts:
17740 case DW_OP_dup:
17741 case DW_OP_over:
17742 case DW_OP_pick:
17743 case DW_OP_lit0:
17744 case DW_OP_lit1:
17745 case DW_OP_lit2:
17746 case DW_OP_lit3:
17747 case DW_OP_lit4:
17748 case DW_OP_lit5:
17749 case DW_OP_lit6:
17750 case DW_OP_lit7:
17751 case DW_OP_lit8:
17752 case DW_OP_lit9:
17753 case DW_OP_lit10:
17754 case DW_OP_lit11:
17755 case DW_OP_lit12:
17756 case DW_OP_lit13:
17757 case DW_OP_lit14:
17758 case DW_OP_lit15:
17759 case DW_OP_lit16:
17760 case DW_OP_lit17:
17761 case DW_OP_lit18:
17762 case DW_OP_lit19:
17763 case DW_OP_lit20:
17764 case DW_OP_lit21:
17765 case DW_OP_lit22:
17766 case DW_OP_lit23:
17767 case DW_OP_lit24:
17768 case DW_OP_lit25:
17769 case DW_OP_lit26:
17770 case DW_OP_lit27:
17771 case DW_OP_lit28:
17772 case DW_OP_lit29:
17773 case DW_OP_lit30:
17774 case DW_OP_lit31:
17775 case DW_OP_breg0:
17776 case DW_OP_breg1:
17777 case DW_OP_breg2:
17778 case DW_OP_breg3:
17779 case DW_OP_breg4:
17780 case DW_OP_breg5:
17781 case DW_OP_breg6:
17782 case DW_OP_breg7:
17783 case DW_OP_breg8:
17784 case DW_OP_breg9:
17785 case DW_OP_breg10:
17786 case DW_OP_breg11:
17787 case DW_OP_breg12:
17788 case DW_OP_breg13:
17789 case DW_OP_breg14:
17790 case DW_OP_breg15:
17791 case DW_OP_breg16:
17792 case DW_OP_breg17:
17793 case DW_OP_breg18:
17794 case DW_OP_breg19:
17795 case DW_OP_breg20:
17796 case DW_OP_breg21:
17797 case DW_OP_breg22:
17798 case DW_OP_breg23:
17799 case DW_OP_breg24:
17800 case DW_OP_breg25:
17801 case DW_OP_breg26:
17802 case DW_OP_breg27:
17803 case DW_OP_breg28:
17804 case DW_OP_breg29:
17805 case DW_OP_breg30:
17806 case DW_OP_breg31:
17807 case DW_OP_fbreg:
17808 case DW_OP_push_object_address:
17809 case DW_OP_call_frame_cfa:
17810 case DW_OP_GNU_variable_value:
17811 ++frame_offset_;
17812 break;
17813
17814 case DW_OP_drop:
17815 case DW_OP_xderef:
17816 case DW_OP_and:
17817 case DW_OP_div:
17818 case DW_OP_minus:
17819 case DW_OP_mod:
17820 case DW_OP_mul:
17821 case DW_OP_or:
17822 case DW_OP_plus:
17823 case DW_OP_shl:
17824 case DW_OP_shr:
17825 case DW_OP_shra:
17826 case DW_OP_xor:
17827 case DW_OP_bra:
17828 case DW_OP_eq:
17829 case DW_OP_ge:
17830 case DW_OP_gt:
17831 case DW_OP_le:
17832 case DW_OP_lt:
17833 case DW_OP_ne:
17834 case DW_OP_regx:
17835 case DW_OP_xderef_size:
17836 --frame_offset_;
17837 break;
17838
17839 case DW_OP_call2:
17840 case DW_OP_call4:
17841 case DW_OP_call_ref:
17842 {
17843 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17844 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17845
17846 if (stack_usage == NULL)
17847 return false;
17848 frame_offset_ += *stack_usage;
17849 break;
17850 }
17851
17852 case DW_OP_implicit_pointer:
17853 case DW_OP_entry_value:
17854 case DW_OP_const_type:
17855 case DW_OP_regval_type:
17856 case DW_OP_deref_type:
17857 case DW_OP_convert:
17858 case DW_OP_reinterpret:
17859 case DW_OP_form_tls_address:
17860 case DW_OP_GNU_push_tls_address:
17861 case DW_OP_GNU_uninit:
17862 case DW_OP_GNU_encoded_addr:
17863 case DW_OP_GNU_implicit_pointer:
17864 case DW_OP_GNU_entry_value:
17865 case DW_OP_GNU_const_type:
17866 case DW_OP_GNU_regval_type:
17867 case DW_OP_GNU_deref_type:
17868 case DW_OP_GNU_convert:
17869 case DW_OP_GNU_reinterpret:
17870 case DW_OP_GNU_parameter_ref:
17871 /* loc_list_from_tree will probably not output these operations for
17872 size functions, so assume they will not appear here. */
17873 /* Fall through... */
17874
17875 default:
17876 gcc_unreachable ();
17877 }
17878
17879 /* Now, follow the control flow (except subroutine calls). */
17880 switch (l->dw_loc_opc)
17881 {
17882 case DW_OP_bra:
17883 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17884 frame_offsets))
17885 return false;
17886 /* Fall through. */
17887
17888 case DW_OP_skip:
17889 l = l->dw_loc_oprnd1.v.val_loc;
17890 break;
17891
17892 case DW_OP_stack_value:
17893 return true;
17894
17895 default:
17896 l = l->dw_loc_next;
17897 break;
17898 }
17899 }
17900
17901 return true;
17902 }
17903
17904 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17905 operations) in order to resolve the operand of DW_OP_pick operations that
17906 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17907 offset *before* LOC is executed. Return whether all relocations were
17908 successful. */
17909
17910 static bool
17911 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17912 struct dwarf_procedure_info *dpi)
17913 {
17914 /* Associate to all visited operations the frame offset *before* evaluating
17915 this operation. */
17916 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17917
17918 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17919 frame_offsets);
17920 }
17921
17922 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17923 Return NULL if it is not possible. */
17924
17925 static dw_die_ref
17926 function_to_dwarf_procedure (tree fndecl)
17927 {
17928 struct loc_descr_context ctx;
17929 struct dwarf_procedure_info dpi;
17930 dw_die_ref dwarf_proc_die;
17931 tree tree_body = DECL_SAVED_TREE (fndecl);
17932 dw_loc_descr_ref loc_body, epilogue;
17933
17934 tree cursor;
17935 unsigned i;
17936
17937 /* Do not generate multiple DWARF procedures for the same function
17938 declaration. */
17939 dwarf_proc_die = lookup_decl_die (fndecl);
17940 if (dwarf_proc_die != NULL)
17941 return dwarf_proc_die;
17942
17943 /* DWARF procedures are available starting with the DWARFv3 standard. */
17944 if (dwarf_version < 3 && dwarf_strict)
17945 return NULL;
17946
17947 /* We handle only functions for which we still have a body, that return a
17948 supported type and that take arguments with supported types. Note that
17949 there is no point in translating functions that return nothing. */
17950 if (tree_body == NULL_TREE
17951 || DECL_RESULT (fndecl) == NULL_TREE
17952 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17953 return NULL;
17954
17955 for (cursor = DECL_ARGUMENTS (fndecl);
17956 cursor != NULL_TREE;
17957 cursor = TREE_CHAIN (cursor))
17958 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17959 return NULL;
17960
17961 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17962 if (TREE_CODE (tree_body) != RETURN_EXPR)
17963 return NULL;
17964 tree_body = TREE_OPERAND (tree_body, 0);
17965 if (TREE_CODE (tree_body) != MODIFY_EXPR
17966 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17967 return NULL;
17968 tree_body = TREE_OPERAND (tree_body, 1);
17969
17970 /* Try to translate the body expression itself. Note that this will probably
17971 cause an infinite recursion if its call graph has a cycle. This is very
17972 unlikely for size functions, however, so don't bother with such things at
17973 the moment. */
17974 ctx.context_type = NULL_TREE;
17975 ctx.base_decl = NULL_TREE;
17976 ctx.dpi = &dpi;
17977 ctx.placeholder_arg = false;
17978 ctx.placeholder_seen = false;
17979 dpi.fndecl = fndecl;
17980 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17981 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17982 if (!loc_body)
17983 return NULL;
17984
17985 /* After evaluating all operands in "loc_body", we should still have on the
17986 stack all arguments plus the desired function result (top of the stack).
17987 Generate code in order to keep only the result in our stack frame. */
17988 epilogue = NULL;
17989 for (i = 0; i < dpi.args_count; ++i)
17990 {
17991 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17992 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17993 op_couple->dw_loc_next->dw_loc_next = epilogue;
17994 epilogue = op_couple;
17995 }
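/* Illustration: with two arguments the epilogue is DW_OP_swap; DW_OP_drop;
   DW_OP_swap; DW_OP_drop. Each swap/drop pair brings one remaining
   argument to the top of the stack and discards it, leaving only the
   result. */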
17996 add_loc_descr (&loc_body, epilogue);
17997 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17998 return NULL;
17999
18000 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
18001 because they are considered useful. Now that there is an epilogue, they
18002 no longer are, so give it another try. */
18003 loc_descr_without_nops (loc_body);
18004
18005 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
18006 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
18007 though, given that size functions do not come from source, so they should
18008 not have a dedicated DW_TAG_subprogram DIE. */
18009 dwarf_proc_die
18010 = new_dwarf_proc_die (loc_body, fndecl,
18011 get_context_die (DECL_CONTEXT (fndecl)));
18012
18013 /* The called DWARF procedure consumes one stack slot per argument and
18014 returns one stack slot. */
18015 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18016
18017 return dwarf_proc_die;
18018 }
18019
18020
18021 /* Generate a Dwarf location list representing LOC.
18022 If WANT_ADDRESS is 0, an expression computing the value of LOC will be
18023 returned. If WANT_ADDRESS is 1, an expression computing the address of
18024 LOC will be returned. If WANT_ADDRESS is 2, an expression computing an
18025 address usable in a location description will be returned (i.e. DW_OP_reg
18026 can be used to refer to register values).
18027
18028 CONTEXT provides information to customize the location description
18029 generation. Its context_type field specifies what type is implicitly
18030 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18031 will not be generated.
18032
18033 Its DPI field determines whether we are generating a DWARF expression for a
18034 DWARF procedure, in which case PARM_DECL references are processed specially.
18035
18036 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18037 and dpi fields were null. */
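/* Illustrative example (not taken from the sources): for a COMPONENT_REF
   such as S.F where S lives in memory, WANT_ADDRESS == 0 yields an
   expression that computes the value of S.F (its address is computed and
   then dereferenced), WANT_ADDRESS == 1 yields the address of S.F, and
   WANT_ADDRESS == 2 may additionally use DW_OP_regN or DW_OP_stack_value
   forms that are only valid as a top-level location. */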
18038
18039 static dw_loc_list_ref
18040 loc_list_from_tree_1 (tree loc, int want_address,
18041 struct loc_descr_context *context)
18042 {
18043 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18044 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18045 int have_address = 0;
18046 enum dwarf_location_atom op;
18047
18048 /* ??? Most of the time we do not take proper care to sign/zero
18049 extend the values. Hopefully this won't be a real
18050 problem... */
18051
18052 if (context != NULL
18053 && context->base_decl == loc
18054 && want_address == 0)
18055 {
18056 if (dwarf_version >= 3 || !dwarf_strict)
18057 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18058 NULL, 0, NULL, 0, NULL);
18059 else
18060 return NULL;
18061 }
18062
18063 switch (TREE_CODE (loc))
18064 {
18065 case ERROR_MARK:
18066 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18067 return 0;
18068
18069 case PLACEHOLDER_EXPR:
18070 /* This case involves extracting fields from an object to determine the
18071 position of other fields. It is supposed to appear only as the first
18072 operand of COMPONENT_REF nodes and to reference precisely the type
18073 that the context allows. */
18074 if (context != NULL
18075 && TREE_TYPE (loc) == context->context_type
18076 && want_address >= 1)
18077 {
18078 if (dwarf_version >= 3 || !dwarf_strict)
18079 {
18080 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18081 have_address = 1;
18082 break;
18083 }
18084 else
18085 return NULL;
18086 }
18087 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18088 the single argument passed by the consumer. */
18089 else if (context != NULL
18090 && context->placeholder_arg
18091 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18092 && want_address == 0)
18093 {
18094 ret = new_loc_descr (DW_OP_pick, 0, 0);
18095 ret->frame_offset_rel = 1;
18096 context->placeholder_seen = true;
18097 break;
18098 }
18099 else
18100 expansion_failed (loc, NULL_RTX,
18101 "PLACEHOLDER_EXPR for an unexpected type");
18102 break;
18103
18104 case CALL_EXPR:
18105 {
18106 const int nargs = call_expr_nargs (loc);
18107 tree callee = get_callee_fndecl (loc);
18108 int i;
18109 dw_die_ref dwarf_proc;
18110
18111 if (callee == NULL_TREE)
18112 goto call_expansion_failed;
18113
18114 /* We handle only functions that return a supported type. */
18115 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18116 goto call_expansion_failed;
18117
18118 dwarf_proc = function_to_dwarf_procedure (callee);
18119 if (dwarf_proc == NULL)
18120 goto call_expansion_failed;
18121
18122 /* Evaluate arguments right-to-left so that the first argument will
18123 be the top-most one on the stack. */
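/* E.g. (illustrative) for a call F (X, Y, Z) we emit the expressions for
   Z, then Y, then X, and finally a DW_OP_call4 referencing F's
   DW_TAG_dwarf_procedure; after the call only the procedure's result
   remains on the stack (see function_to_dwarf_procedure). */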
18124 for (i = nargs - 1; i >= 0; --i)
18125 {
18126 dw_loc_descr_ref loc_descr
18127 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18128 context);
18129
18130 if (loc_descr == NULL)
18131 goto call_expansion_failed;
18132
18133 add_loc_descr (&ret, loc_descr);
18134 }
18135
18136 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18137 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18138 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18139 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18140 add_loc_descr (&ret, ret1);
18141 break;
18142
18143 call_expansion_failed:
18144 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18145 /* There are no opcodes for these operations. */
18146 return 0;
18147 }
18148
18149 case PREINCREMENT_EXPR:
18150 case PREDECREMENT_EXPR:
18151 case POSTINCREMENT_EXPR:
18152 case POSTDECREMENT_EXPR:
18153 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18154 /* There are no opcodes for these operations. */
18155 return 0;
18156
18157 case ADDR_EXPR:
18158 /* If we already want an address, see if there is an INDIRECT_REF inside,
18159 e.g. for &this->field. */
18160 if (want_address)
18161 {
18162 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18163 (loc, want_address == 2, context);
18164 if (list_ret)
18165 have_address = 1;
18166 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18167 && (ret = cst_pool_loc_descr (loc)))
18168 have_address = 1;
18169 }
18170 /* Otherwise, process the argument and look for the address. */
18171 if (!list_ret && !ret)
18172 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18173 else
18174 {
18175 if (want_address)
18176 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18177 return NULL;
18178 }
18179 break;
18180
18181 case VAR_DECL:
18182 if (DECL_THREAD_LOCAL_P (loc))
18183 {
18184 rtx rtl;
18185 enum dwarf_location_atom tls_op;
18186 enum dtprel_bool dtprel = dtprel_false;
18187
18188 if (targetm.have_tls)
18189 {
18190 /* If this is not defined, we have no way to emit the
18191 data. */
18192 if (!targetm.asm_out.output_dwarf_dtprel)
18193 return 0;
18194
18195 /* The way DW_OP_GNU_push_tls_address is specified, we
18196 can only look up addresses of objects in the current
18197 module. We used DW_OP_addr as first op, but that's
18198 wrong, because DW_OP_addr is relocated by the debug
18199 info consumer, while DW_OP_GNU_push_tls_address
18200 operand shouldn't be. */
18201 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18202 return 0;
18203 dtprel = dtprel_true;
18204 /* We check for DWARF 5 here because gdb did not implement
18205 DW_OP_form_tls_address until after 7.12. */
18206 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18207 : DW_OP_GNU_push_tls_address);
18208 }
18209 else
18210 {
18211 if (!targetm.emutls.debug_form_tls_address
18212 || !(dwarf_version >= 3 || !dwarf_strict))
18213 return 0;
18214 /* We stuffed the control variable into the DECL_VALUE_EXPR
18215 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18216 no longer appear in gimple code. We used the control
18217 variable specifically so that we could pick it up here. */
18218 loc = DECL_VALUE_EXPR (loc);
18219 tls_op = DW_OP_form_tls_address;
18220 }
18221
18222 rtl = rtl_for_decl_location (loc);
18223 if (rtl == NULL_RTX)
18224 return 0;
18225
18226 if (!MEM_P (rtl))
18227 return 0;
18228 rtl = XEXP (rtl, 0);
18229 if (! CONSTANT_P (rtl))
18230 return 0;
18231
18232 ret = new_addr_loc_descr (rtl, dtprel);
18233 ret1 = new_loc_descr (tls_op, 0, 0);
18234 add_loc_descr (&ret, ret1);
18235
18236 have_address = 1;
18237 break;
18238 }
18239 /* FALLTHRU */
18240
18241 case PARM_DECL:
18242 if (context != NULL && context->dpi != NULL
18243 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18244 {
18245 /* We are generating code for a DWARF procedure and we want to access
18246 one of its arguments: find the appropriate argument offset and let
18247 the resolve_args_picking pass compute the offset that complies
18248 with the stack frame size. */
18249 unsigned i = 0;
18250 tree cursor;
18251
18252 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18253 cursor != NULL_TREE && cursor != loc;
18254 cursor = TREE_CHAIN (cursor), ++i)
18255 ;
18256 /* If we are translating a DWARF procedure, all referenced parameters
18257 must belong to the current function. */
18258 gcc_assert (cursor != NULL_TREE);
18259
18260 ret = new_loc_descr (DW_OP_pick, i, 0);
18261 ret->frame_offset_rel = 1;
18262 break;
18263 }
18264 /* FALLTHRU */
18265
18266 case RESULT_DECL:
18267 if (DECL_HAS_VALUE_EXPR_P (loc))
18268 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18269 want_address, context);
18270 /* FALLTHRU */
18271
18272 case FUNCTION_DECL:
18273 {
18274 rtx rtl;
18275 var_loc_list *loc_list = lookup_decl_loc (loc);
18276
18277 if (loc_list && loc_list->first)
18278 {
18279 list_ret = dw_loc_list (loc_list, loc, want_address);
18280 have_address = want_address != 0;
18281 break;
18282 }
18283 rtl = rtl_for_decl_location (loc);
18284 if (rtl == NULL_RTX)
18285 {
18286 if (TREE_CODE (loc) != FUNCTION_DECL
18287 && early_dwarf
18288 && current_function_decl
18289 && want_address != 1
18290 && ! DECL_IGNORED_P (loc)
18291 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18292 || POINTER_TYPE_P (TREE_TYPE (loc)))
18293 && DECL_CONTEXT (loc) == current_function_decl
18294 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18295 <= DWARF2_ADDR_SIZE))
18296 {
18297 dw_die_ref ref = lookup_decl_die (loc);
18298 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18299 if (ref)
18300 {
18301 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18302 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18303 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18304 }
18305 else
18306 {
18307 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18308 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18309 }
18310 break;
18311 }
18312 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18313 return 0;
18314 }
18315 else if (CONST_INT_P (rtl))
18316 {
18317 HOST_WIDE_INT val = INTVAL (rtl);
18318 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18319 val &= GET_MODE_MASK (DECL_MODE (loc));
18320 ret = int_loc_descriptor (val);
18321 }
18322 else if (GET_CODE (rtl) == CONST_STRING)
18323 {
18324 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18325 return 0;
18326 }
18327 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18328 ret = new_addr_loc_descr (rtl, dtprel_false);
18329 else
18330 {
18331 machine_mode mode, mem_mode;
18332
18333 /* Certain constructs can only be represented at top-level. */
18334 if (want_address == 2)
18335 {
18336 ret = loc_descriptor (rtl, VOIDmode,
18337 VAR_INIT_STATUS_INITIALIZED);
18338 have_address = 1;
18339 }
18340 else
18341 {
18342 mode = GET_MODE (rtl);
18343 mem_mode = VOIDmode;
18344 if (MEM_P (rtl))
18345 {
18346 mem_mode = mode;
18347 mode = get_address_mode (rtl);
18348 rtl = XEXP (rtl, 0);
18349 have_address = 1;
18350 }
18351 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18352 VAR_INIT_STATUS_INITIALIZED);
18353 }
18354 if (!ret)
18355 expansion_failed (loc, rtl,
18356 "failed to produce loc descriptor for rtl");
18357 }
18358 }
18359 break;
18360
18361 case MEM_REF:
18362 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18363 {
18364 have_address = 1;
18365 goto do_plus;
18366 }
18367 /* Fallthru. */
18368 case INDIRECT_REF:
18369 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18370 have_address = 1;
18371 break;
18372
18373 case TARGET_MEM_REF:
18374 case SSA_NAME:
18375 case DEBUG_EXPR_DECL:
18376 return NULL;
18377
18378 case COMPOUND_EXPR:
18379 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18380 context);
18381
18382 CASE_CONVERT:
18383 case VIEW_CONVERT_EXPR:
18384 case SAVE_EXPR:
18385 case MODIFY_EXPR:
18386 case NON_LVALUE_EXPR:
18387 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18388 context);
18389
18390 case COMPONENT_REF:
18391 case BIT_FIELD_REF:
18392 case ARRAY_REF:
18393 case ARRAY_RANGE_REF:
18394 case REALPART_EXPR:
18395 case IMAGPART_EXPR:
18396 {
18397 tree obj, offset;
18398 poly_int64 bitsize, bitpos, bytepos;
18399 machine_mode mode;
18400 int unsignedp, reversep, volatilep = 0;
18401
18402 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18403 &unsignedp, &reversep, &volatilep);
18404
18405 gcc_assert (obj != loc);
18406
18407 list_ret = loc_list_from_tree_1 (obj,
18408 want_address == 2
18409 && known_eq (bitpos, 0)
18410 && !offset ? 2 : 1,
18411 context);
18412 /* TODO: We can extract the value of a small expression via shifting even
18413 for a nonzero bitpos. */
18414 if (list_ret == 0)
18415 return 0;
18416 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18417 || !multiple_p (bitsize, BITS_PER_UNIT))
18418 {
18419 expansion_failed (loc, NULL_RTX,
18420 "bitfield access");
18421 return 0;
18422 }
18423
18424 if (offset != NULL_TREE)
18425 {
18426 /* Variable offset. */
18427 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18428 if (list_ret1 == 0)
18429 return 0;
18430 add_loc_list (&list_ret, list_ret1);
18431 if (!list_ret)
18432 return 0;
18433 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18434 }
18435
18436 HOST_WIDE_INT value;
18437 if (bytepos.is_constant (&value) && value > 0)
18438 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18439 value, 0));
18440 else if (maybe_ne (bytepos, 0))
18441 loc_list_plus_const (list_ret, bytepos);
18442
18443 have_address = 1;
18444 break;
18445 }
18446
18447 case INTEGER_CST:
18448 if ((want_address || !tree_fits_shwi_p (loc))
18449 && (ret = cst_pool_loc_descr (loc)))
18450 have_address = 1;
18451 else if (want_address == 2
18452 && tree_fits_shwi_p (loc)
18453 && (ret = address_of_int_loc_descriptor
18454 (int_size_in_bytes (TREE_TYPE (loc)),
18455 tree_to_shwi (loc))))
18456 have_address = 1;
18457 else if (tree_fits_shwi_p (loc))
18458 ret = int_loc_descriptor (tree_to_shwi (loc));
18459 else if (tree_fits_uhwi_p (loc))
18460 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18461 else
18462 {
18463 expansion_failed (loc, NULL_RTX,
18464 "Integer operand is not host integer");
18465 return 0;
18466 }
18467 break;
18468
18469 case CONSTRUCTOR:
18470 case REAL_CST:
18471 case STRING_CST:
18472 case COMPLEX_CST:
18473 if ((ret = cst_pool_loc_descr (loc)))
18474 have_address = 1;
18475 else if (TREE_CODE (loc) == CONSTRUCTOR)
18476 {
18477 tree type = TREE_TYPE (loc);
18478 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18479 unsigned HOST_WIDE_INT offset = 0;
18480 unsigned HOST_WIDE_INT cnt;
18481 constructor_elt *ce;
18482
18483 if (TREE_CODE (type) == RECORD_TYPE)
18484 {
18485 /* This is very limited, but it's enough to output
18486 pointers to member functions, as long as the
18487 referenced function is defined in the current
18488 translation unit. */
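/* Illustrative sketch (field names are hypothetical): for a
   pointer-to-member-function constant like { ptr = &f, adj = 0 }, this
   loop emits the location for &f followed by DW_OP_piece <size of ptr>,
   then the location for 0 followed by DW_OP_piece <size of adj>, padding
   any gap between fields with an extra DW_OP_piece. */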
18489 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18490 {
18491 tree val = ce->value;
18492
18493 tree field = ce->index;
18494
18495 if (val)
18496 STRIP_NOPS (val);
18497
18498 if (!field || DECL_BIT_FIELD (field))
18499 {
18500 expansion_failed (loc, NULL_RTX,
18501 "bitfield in record type constructor");
18502 size = offset = (unsigned HOST_WIDE_INT)-1;
18503 ret = NULL;
18504 break;
18505 }
18506
18507 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18508 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18509 gcc_assert (pos + fieldsize <= size);
18510 if (pos < offset)
18511 {
18512 expansion_failed (loc, NULL_RTX,
18513 "out-of-order fields in record constructor");
18514 size = offset = (unsigned HOST_WIDE_INT)-1;
18515 ret = NULL;
18516 break;
18517 }
18518 if (pos > offset)
18519 {
18520 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18521 add_loc_descr (&ret, ret1);
18522 offset = pos;
18523 }
18524 if (val && fieldsize != 0)
18525 {
18526 ret1 = loc_descriptor_from_tree (val, want_address, context);
18527 if (!ret1)
18528 {
18529 expansion_failed (loc, NULL_RTX,
18530 "unsupported expression in field");
18531 size = offset = (unsigned HOST_WIDE_INT)-1;
18532 ret = NULL;
18533 break;
18534 }
18535 add_loc_descr (&ret, ret1);
18536 }
18537 if (fieldsize)
18538 {
18539 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18540 add_loc_descr (&ret, ret1);
18541 offset = pos + fieldsize;
18542 }
18543 }
18544
18545 if (offset != size)
18546 {
18547 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18548 add_loc_descr (&ret, ret1);
18549 offset = size;
18550 }
18551
18552 have_address = !!want_address;
18553 }
18554 else
18555 expansion_failed (loc, NULL_RTX,
18556 "constructor of non-record type");
18557 }
18558 else
18559 /* We can construct small constants here using int_loc_descriptor. */
18560 expansion_failed (loc, NULL_RTX,
18561 "constructor or constant not in constant pool");
18562 break;
18563
18564 case TRUTH_AND_EXPR:
18565 case TRUTH_ANDIF_EXPR:
18566 case BIT_AND_EXPR:
18567 op = DW_OP_and;
18568 goto do_binop;
18569
18570 case TRUTH_XOR_EXPR:
18571 case BIT_XOR_EXPR:
18572 op = DW_OP_xor;
18573 goto do_binop;
18574
18575 case TRUTH_OR_EXPR:
18576 case TRUTH_ORIF_EXPR:
18577 case BIT_IOR_EXPR:
18578 op = DW_OP_or;
18579 goto do_binop;
18580
18581 case FLOOR_DIV_EXPR:
18582 case CEIL_DIV_EXPR:
18583 case ROUND_DIV_EXPR:
18584 case TRUNC_DIV_EXPR:
18585 case EXACT_DIV_EXPR:
18586 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18587 return 0;
18588 op = DW_OP_div;
18589 goto do_binop;
18590
18591 case MINUS_EXPR:
18592 op = DW_OP_minus;
18593 goto do_binop;
18594
18595 case FLOOR_MOD_EXPR:
18596 case CEIL_MOD_EXPR:
18597 case ROUND_MOD_EXPR:
18598 case TRUNC_MOD_EXPR:
18599 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18600 {
18601 op = DW_OP_mod;
18602 goto do_binop;
18603 }
18604 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18605 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18606 if (list_ret == 0 || list_ret1 == 0)
18607 return 0;
18608
18609 add_loc_list (&list_ret, list_ret1);
18610 if (list_ret == 0)
18611 return 0;
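/* For signed operands the remainder is computed explicitly as
   A - (A / B) * B (DW_OP_div is a signed division). With A below B on
   the stack: the two DW_OP_over operations copy A and B, DW_OP_div
   computes A / B, DW_OP_mul multiplies that by B, and DW_OP_minus
   subtracts the product from the original A (illustrative description
   of the sequence built just below). */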
18612 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18613 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18614 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18615 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18616 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18617 break;
18618
18619 case MULT_EXPR:
18620 op = DW_OP_mul;
18621 goto do_binop;
18622
18623 case LSHIFT_EXPR:
18624 op = DW_OP_shl;
18625 goto do_binop;
18626
18627 case RSHIFT_EXPR:
18628 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18629 goto do_binop;
18630
18631 case POINTER_PLUS_EXPR:
18632 case PLUS_EXPR:
18633 do_plus:
18634 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18635 {
18636 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18637 smarter to encode their opposite. The DW_OP_plus_uconst operation
18638 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18639 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18640 bytes, Y being the size of the operation that pushes the opposite
18641 of the addend. So let's choose the smallest representation. */
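/* Worked example (illustrative): on a 64-bit target, an addend of -1
   sign-extends to 0xffffffffffffffff, whose ULEB128 encoding takes 10
   bytes, so DW_OP_plus_uconst would need 11 bytes in total; pushing the
   opposite with DW_OP_lit1 followed by DW_OP_minus takes only 2 bytes. */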
18642 const tree tree_addend = TREE_OPERAND (loc, 1);
18643 offset_int wi_addend;
18644 HOST_WIDE_INT shwi_addend;
18645 dw_loc_descr_ref loc_naddend;
18646
18647 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18648 if (list_ret == 0)
18649 return 0;
18650
18651 /* Try to get the literal to push. It is the opposite of the addend,
18652 and since we rely on wrapping during DWARF evaluation, first decode
18653 the literal as a "DWARF-sized" signed number. */
18654 wi_addend = wi::to_offset (tree_addend);
18655 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18656 shwi_addend = wi_addend.to_shwi ();
18657 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18658 ? int_loc_descriptor (-shwi_addend)
18659 : NULL;
18660
18661 if (loc_naddend != NULL
18662 && ((unsigned) size_of_uleb128 (shwi_addend)
18663 > size_of_loc_descr (loc_naddend)))
18664 {
18665 add_loc_descr_to_each (list_ret, loc_naddend);
18666 add_loc_descr_to_each (list_ret,
18667 new_loc_descr (DW_OP_minus, 0, 0));
18668 }
18669 else
18670 {
18671 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18672 {
18673 loc_naddend = loc_cur;
18674 loc_cur = loc_cur->dw_loc_next;
18675 ggc_free (loc_naddend);
18676 }
18677 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18678 }
18679 break;
18680 }
18681
18682 op = DW_OP_plus;
18683 goto do_binop;
18684
18685 case LE_EXPR:
18686 op = DW_OP_le;
18687 goto do_comp_binop;
18688
18689 case GE_EXPR:
18690 op = DW_OP_ge;
18691 goto do_comp_binop;
18692
18693 case LT_EXPR:
18694 op = DW_OP_lt;
18695 goto do_comp_binop;
18696
18697 case GT_EXPR:
18698 op = DW_OP_gt;
18699 goto do_comp_binop;
18700
18701 do_comp_binop:
18702 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18703 {
18704 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18705 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18706 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18707 TREE_CODE (loc));
18708 break;
18709 }
18710 else
18711 goto do_binop;
18712
18713 case EQ_EXPR:
18714 op = DW_OP_eq;
18715 goto do_binop;
18716
18717 case NE_EXPR:
18718 op = DW_OP_ne;
18719 goto do_binop;
18720
18721 do_binop:
18722 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18723 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18724 if (list_ret == 0 || list_ret1 == 0)
18725 return 0;
18726
18727 add_loc_list (&list_ret, list_ret1);
18728 if (list_ret == 0)
18729 return 0;
18730 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18731 break;
18732
18733 case TRUTH_NOT_EXPR:
18734 case BIT_NOT_EXPR:
18735 op = DW_OP_not;
18736 goto do_unop;
18737
18738 case ABS_EXPR:
18739 op = DW_OP_abs;
18740 goto do_unop;
18741
18742 case NEGATE_EXPR:
18743 op = DW_OP_neg;
18744 goto do_unop;
18745
18746 do_unop:
18747 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18748 if (list_ret == 0)
18749 return 0;
18750
18751 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18752 break;
18753
18754 case MIN_EXPR:
18755 case MAX_EXPR:
18756 {
18757 const enum tree_code code =
18758 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18759
18760 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18761 build2 (code, integer_type_node,
18762 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18763 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18764 }
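/* E.g. MIN_EXPR (A, B) is rewritten (illustratively) as
   COND_EXPR (A > B, B, A) and handled by the COND_EXPR case below. */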
18765
18766 /* fall through */
18767
18768 case COND_EXPR:
18769 {
18770 dw_loc_descr_ref lhs
18771 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18772 dw_loc_list_ref rhs
18773 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18774 dw_loc_descr_ref bra_node, jump_node, tmp;
18775
18776 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18777 if (list_ret == 0 || lhs == 0 || rhs == 0)
18778 return 0;
18779
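/* The expression built below is, schematically (illustrative):
   <condition> DW_OP_bra L1 <else-value> DW_OP_skip L2 L1: <then-value>
   L2: DW_OP_nop
   i.e. a nonzero condition branches over the "else" value straight to
   the "then" value, and the trailing nop only serves as the skip
   target. */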
18780 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18781 add_loc_descr_to_each (list_ret, bra_node);
18782
18783 add_loc_list (&list_ret, rhs);
18784 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18785 add_loc_descr_to_each (list_ret, jump_node);
18786
18787 add_loc_descr_to_each (list_ret, lhs);
18788 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18789 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18790
18791 /* ??? Need a node to point the skip at. Use a nop. */
18792 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18793 add_loc_descr_to_each (list_ret, tmp);
18794 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18795 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18796 }
18797 break;
18798
18799 case FIX_TRUNC_EXPR:
18800 return 0;
18801
18802 default:
18803 /* Leave front-end specific codes as simply unknown. This comes
18804 up, for instance, with the C STMT_EXPR. */
18805 if ((unsigned int) TREE_CODE (loc)
18806 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18807 {
18808 expansion_failed (loc, NULL_RTX,
18809 "language specific tree node");
18810 return 0;
18811 }
18812
18813 /* Otherwise this is a generic code; we should just list all of
18814 these explicitly. We forgot one. */
18815 if (flag_checking)
18816 gcc_unreachable ();
18817
18818 /* In a release build, we want to degrade gracefully: better to
18819 generate incomplete debugging information than to crash. */
18820 return NULL;
18821 }
18822
18823 if (!ret && !list_ret)
18824 return 0;
18825
18826 if (want_address == 2 && !have_address
18827 && (dwarf_version >= 4 || !dwarf_strict))
18828 {
18829 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18830 {
18831 expansion_failed (loc, NULL_RTX,
18832 "DWARF address size mismatch");
18833 return 0;
18834 }
18835 if (ret)
18836 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18837 else
18838 add_loc_descr_to_each (list_ret,
18839 new_loc_descr (DW_OP_stack_value, 0, 0));
18840 have_address = 1;
18841 }
18842 /* Show if we can't fill the request for an address. */
18843 if (want_address && !have_address)
18844 {
18845 expansion_failed (loc, NULL_RTX,
18846 "Want address and only have value");
18847 return 0;
18848 }
18849
18850 gcc_assert (!ret || !list_ret);
18851
18852 /* If we've got an address and don't want one, dereference. */
18853 if (!want_address && have_address)
18854 {
18855 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18856
18857 if (size > DWARF2_ADDR_SIZE || size == -1)
18858 {
18859 expansion_failed (loc, NULL_RTX,
18860 "DWARF address size mismatch");
18861 return 0;
18862 }
18863 else if (size == DWARF2_ADDR_SIZE)
18864 op = DW_OP_deref;
18865 else
18866 op = DW_OP_deref_size;
18867
18868 if (ret)
18869 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18870 else
18871 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18872 }
18873 if (ret)
18874 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18875
18876 return list_ret;
18877 }
18878
18879 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18880 expressions. */
18881
18882 static dw_loc_list_ref
18883 loc_list_from_tree (tree loc, int want_address,
18884 struct loc_descr_context *context)
18885 {
18886 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18887
18888 for (dw_loc_list_ref loc_cur = result;
18889 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18890 loc_descr_without_nops (loc_cur->expr);
18891 return result;
18892 }
18893
18894 /* Same as above but return only a single location expression. */
18895 static dw_loc_descr_ref
18896 loc_descriptor_from_tree (tree loc, int want_address,
18897 struct loc_descr_context *context)
18898 {
18899 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18900 if (!ret)
18901 return NULL;
18902 if (ret->dw_loc_next)
18903 {
18904 expansion_failed (loc, NULL_RTX,
18905 "Location list where only loc descriptor needed");
18906 return NULL;
18907 }
18908 return ret->expr;
18909 }
18910
18911 /* Given a value, round it up to the lowest multiple of `boundary'
18912 which is not less than the value itself. */
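/* For instance (illustrative), ceiling (5, 4) is 8 while ceiling (8, 4)
   stays 8. */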
18913
18914 static inline HOST_WIDE_INT
18915 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18916 {
18917 return (((value + boundary - 1) / boundary) * boundary);
18918 }
18919
18920 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18921 pointer to the declared type for the relevant field variable, or return
18922 `integer_type_node' if the given node turns out to be an
18923 ERROR_MARK node. */
18924
18925 static inline tree
18926 field_type (const_tree decl)
18927 {
18928 tree type;
18929
18930 if (TREE_CODE (decl) == ERROR_MARK)
18931 return integer_type_node;
18932
18933 type = DECL_BIT_FIELD_TYPE (decl);
18934 if (type == NULL_TREE)
18935 type = TREE_TYPE (decl);
18936
18937 return type;
18938 }
18939
18940 /* Given a pointer to a tree node, return the alignment in bits for
18941 it, or else return BITS_PER_WORD if the node actually turns out to
18942 be an ERROR_MARK node. */
18943
18944 static inline unsigned
18945 simple_type_align_in_bits (const_tree type)
18946 {
18947 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18948 }
18949
18950 static inline unsigned
18951 simple_decl_align_in_bits (const_tree decl)
18952 {
18953 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18954 }
18955
18956 /* Return the result of rounding T up to ALIGN. */
18957
18958 static inline offset_int
18959 round_up_to_align (const offset_int &t, unsigned int align)
18960 {
18961 return wi::udiv_trunc (t + align - 1, align) * align;
18962 }
18963
18964 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18965 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18966 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18967 if we fail to return the size in one of these two forms. */
18968
18969 static dw_loc_descr_ref
18970 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18971 {
18972 tree tree_size;
18973 struct loc_descr_context ctx;
18974
18975 /* Return a constant integer if possible, in preference to an expression. */
18976 *cst_size = int_size_in_bytes (type);
18977 if (*cst_size != -1)
18978 return NULL;
18979
18980 ctx.context_type = const_cast<tree> (type);
18981 ctx.base_decl = NULL_TREE;
18982 ctx.dpi = NULL;
18983 ctx.placeholder_arg = false;
18984 ctx.placeholder_seen = false;
18985
18986 type = TYPE_MAIN_VARIANT (type);
18987 tree_size = TYPE_SIZE_UNIT (type);
18988 return ((tree_size != NULL_TREE)
18989 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18990 : NULL);
18991 }
18992
18993 /* Helper structure for RECORD_TYPE processing. */
18994 struct vlr_context
18995 {
18996 /* Root RECORD_TYPE. It is needed to generate data member location
18997 descriptions in variable-length records (VLR), but also to cope with
18998 variants, which are composed of nested structures multiplexed with
18999 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
19000 function processing a FIELD_DECL, it is required to be non-null. */
19001 tree struct_type;
19002 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
19003 QUAL_UNION_TYPE), this holds an expression that computes the offset for
19004 this variant part as part of the root record (in storage units). For
19005 regular records, it must be NULL_TREE. */
19006 tree variant_part_offset;
19007 };
19008
19009 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
19010 addressed byte of the "containing object" for the given FIELD_DECL. If
19011 possible, return a native constant through CST_OFFSET (in which case NULL is
19012 returned); otherwise return a DWARF expression that computes the offset.
19013
19014 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19015 that offset is, either because the argument turns out to be a pointer to an
19016 ERROR_MARK node, or because the offset expression is too complex for us.
19017
19018 CTX is required: see the comment for VLR_CONTEXT. */
19019
19020 static dw_loc_descr_ref
19021 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19022 HOST_WIDE_INT *cst_offset)
19023 {
19024 tree tree_result;
19025 dw_loc_list_ref loc_result;
19026
19027 *cst_offset = 0;
19028
19029 if (TREE_CODE (decl) == ERROR_MARK)
19030 return NULL;
19031 else
19032 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19033
19034 /* We cannot handle variable bit offsets at the moment, so give up (return
19035 NULL) if that's the case. */
19036 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19037 return NULL;
19038
19039 #ifdef PCC_BITFIELD_TYPE_MATTERS
19040 /* We used to handle only constant offsets in all cases. Now, we properly
19041 handle dynamic byte offsets only when the PCC bitfield type doesn't
19042 matter. */
19043 if (PCC_BITFIELD_TYPE_MATTERS
19044 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19045 {
19046 offset_int object_offset_in_bits;
19047 offset_int object_offset_in_bytes;
19048 offset_int bitpos_int;
19049 tree type;
19050 tree field_size_tree;
19051 offset_int deepest_bitpos;
19052 offset_int field_size_in_bits;
19053 unsigned int type_align_in_bits;
19054 unsigned int decl_align_in_bits;
19055 offset_int type_size_in_bits;
19056
19057 bitpos_int = wi::to_offset (bit_position (decl));
19058 type = field_type (decl);
19059 type_size_in_bits = offset_int_type_size_in_bits (type);
19060 type_align_in_bits = simple_type_align_in_bits (type);
19061
19062 field_size_tree = DECL_SIZE (decl);
19063
19064 /* The size could be unspecified if there was an error, or for
19065 a flexible array member. */
19066 if (!field_size_tree)
19067 field_size_tree = bitsize_zero_node;
19068
19069 /* If the size of the field is not constant, use the type size. */
19070 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19071 field_size_in_bits = wi::to_offset (field_size_tree);
19072 else
19073 field_size_in_bits = type_size_in_bits;
19074
19075 decl_align_in_bits = simple_decl_align_in_bits (decl);
19076
19077 /* The GCC front-end doesn't make any attempt to keep track of the
19078 starting bit offset (relative to the start of the containing
19079 structure type) of the hypothetical "containing object" for a
19080 bit-field. Thus, when computing the byte offset value for the
19081 start of the "containing object" of a bit-field, we must deduce
19082 this information on our own. This can be rather tricky to do in
19083 some cases. For example, handling the following structure type
19084 definition when compiling for an i386/i486 target (which only
19085 aligns long long's to 32-bit boundaries) can be very tricky:
19086
19087 struct S { int field1; long long field2:31; };
19088
19089 Fortunately, there is a simple rule-of-thumb which can be used
19090 in such cases. When compiling for an i386/i486, GCC will
19091 allocate 8 bytes for the structure shown above. It decides to
19092 do this based upon one simple rule for bit-field allocation.
19093 GCC allocates each "containing object" for each bit-field at
19094 the first (i.e. lowest addressed) legitimate alignment boundary
19095 (based upon the required minimum alignment for the declared
19096 type of the field) which it can possibly use, subject to the
19097 condition that there is still enough available space remaining
19098 in the containing object (when allocated at the selected point)
19099 to fully accommodate all of the bits of the bit-field itself.
19100
19101 This simple rule makes it obvious why GCC allocates 8 bytes for
19102 each object of the structure type shown above. When looking
19103 for a place to allocate the "containing object" for `field2',
19104 the compiler simply tries to allocate a 64-bit "containing
19105 object" at each successive 32-bit boundary (starting at zero)
19106 until it finds a place to allocate that 64-bit field such that
19107 at least 31 contiguous (and previously unallocated) bits remain
19108 within that selected 64 bit field. (As it turns out, for the
19109 example above, the compiler finds it is OK to allocate the
19110 "containing object" 64-bit field at bit-offset zero within the
19111 structure type.)
19112
19113 Here we attempt to work backwards from the limited set of facts
19114 we're given, and we try to deduce from those facts, where GCC
19115 must have believed that the containing object started (within
19116 the structure type). The value we deduce is then used (by the
19117 callers of this routine) to generate DW_AT_location and
19118 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19119 the case of DW_AT_location, regular fields as well). */
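/* Worked example (illustrative) for the struct S above on i386: field2
   has bit position 32 and size 31 bits, so deepest_bitpos is 63; with a
   64-bit containing type, 63 - 64 = -1, which round_up_to_align (with a
   32-bit type alignment) brings to 0: the hypothetical containing object
   starts at byte offset 0, as described above. */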
19120
19121 /* Figure out the bit-distance from the start of the structure to
19122 the "deepest" bit of the bit-field. */
19123 deepest_bitpos = bitpos_int + field_size_in_bits;
19124
19125 /* This is the tricky part. Use some fancy footwork to deduce
19126 where the lowest addressed bit of the containing object must
19127 be. */
19128 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19129
19130 /* Round up to type_align by default. This works best for
19131 bitfields. */
19132 object_offset_in_bits
19133 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19134
19135 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19136 {
19137 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19138
19139 /* Round up to decl_align instead. */
19140 object_offset_in_bits
19141 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19142 }
19143
19144 object_offset_in_bytes
19145 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19146 if (ctx->variant_part_offset == NULL_TREE)
19147 {
19148 *cst_offset = object_offset_in_bytes.to_shwi ();
19149 return NULL;
19150 }
19151 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19152 }
19153 else
19154 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19155 tree_result = byte_position (decl);
19156
19157 if (ctx->variant_part_offset != NULL_TREE)
19158 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19159 ctx->variant_part_offset, tree_result);
19160
19161 /* If the byte offset is a constant, it's simpler to handle a native
19162 constant rather than a DWARF expression. */
19163 if (TREE_CODE (tree_result) == INTEGER_CST)
19164 {
19165 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19166 return NULL;
19167 }
19168 struct loc_descr_context loc_ctx = {
19169 ctx->struct_type, /* context_type */
19170 NULL_TREE, /* base_decl */
19171 NULL, /* dpi */
19172 false, /* placeholder_arg */
19173 false /* placeholder_seen */
19174 };
19175 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19176
19177 /* We want a DWARF expression: bail out (return NULL) if we only have a
19178 location list with multiple elements. */
19179 if (!loc_result || !single_element_loc_list_p (loc_result))
19180 return NULL;
19181 else
19182 return loc_result->expr;
19183 }
19184 \f
19185 /* The following routines define various Dwarf attributes and any data
19186 associated with them. */
19187
19188 /* Add a location description attribute value to a DIE.
19189
19190 This emits location attributes suitable for whole variables and
19191 whole parameters. Note that the location attributes for struct fields are
19192 generated by the routine `data_member_location_attribute' below. */
19193
19194 static inline void
19195 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19196 dw_loc_list_ref descr)
19197 {
19198 bool check_no_locviews = true;
19199 if (descr == 0)
19200 return;
19201 if (single_element_loc_list_p (descr))
19202 add_AT_loc (die, attr_kind, descr->expr);
19203 else
19204 {
19205 add_AT_loc_list (die, attr_kind, descr);
19206 gcc_assert (descr->ll_symbol);
19207 if (attr_kind == DW_AT_location && descr->vl_symbol
19208 && dwarf2out_locviews_in_attribute ())
19209 {
19210 add_AT_view_list (die, DW_AT_GNU_locviews);
19211 check_no_locviews = false;
19212 }
19213 }
19214
19215 if (check_no_locviews)
19216 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19217 }
19218
19219 /* Add DW_AT_accessibility attribute to DIE if needed. */
19220
19221 static void
19222 add_accessibility_attribute (dw_die_ref die, tree decl)
19223 {
19224 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19225 children, otherwise the default is DW_ACCESS_public. In DWARF2
19226 the default has always been DW_ACCESS_public. */
19227 if (TREE_PROTECTED (decl))
19228 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19229 else if (TREE_PRIVATE (decl))
19230 {
19231 if (dwarf_version == 2
19232 || die->die_parent == NULL
19233 || die->die_parent->die_tag != DW_TAG_class_type)
19234 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19235 }
19236 else if (dwarf_version > 2
19237 && die->die_parent
19238 && die->die_parent->die_tag == DW_TAG_class_type)
19239 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19240 }
19241
19242 /* Attach the specialized form of location attribute used for data members of
19243 struct and union types. In the special case of a FIELD_DECL node which
19244 represents a bit-field, the "offset" part of this special location
19245 descriptor must indicate the distance in bytes from the lowest-addressed
19246 byte of the containing struct or union type to the lowest-addressed byte of
19247 the "containing object" for the bit-field. (See the `field_byte_offset'
19248 function above).
19249
19250 For any given bit-field, the "containing object" is a hypothetical object
19251 (of some integral or enum type) within which the given bit-field lives. The
19252 type of this hypothetical "containing object" is always the same as the
19253 declared type of the individual bit-field itself (for GCC anyway... the
19254 DWARF spec doesn't actually mandate this). Note that it is the size (in
19255 bytes) of the hypothetical "containing object" which will be given in the
19256 DW_AT_byte_size attribute for this bit-field. (See the
19257 `byte_size_attribute' function below.) It is also used when calculating the
19258 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19259 function below.)
19260
19261 CTX is required: see the comment for VLR_CONTEXT. */
19262
19263 static void
19264 add_data_member_location_attribute (dw_die_ref die,
19265 tree decl,
19266 struct vlr_context *ctx)
19267 {
19268 HOST_WIDE_INT offset;
19269 dw_loc_descr_ref loc_descr = 0;
19270
19271 if (TREE_CODE (decl) == TREE_BINFO)
19272 {
19273 /* We're working on the TAG_inheritance for a base class. */
19274 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19275 {
19276 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19277 aren't at a fixed offset from all (sub)objects of the same
19278 type. We need to extract the appropriate offset from our
19279 vtable. The following dwarf expression means
19280
19281 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19282
19283 This is specific to the V3 ABI, of course. */
19284
19285 dw_loc_descr_ref tmp;
19286
19287 /* Make a copy of the object address. */
19288 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19289 add_loc_descr (&loc_descr, tmp);
19290
19291 /* Extract the vtable address. */
19292 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19293 add_loc_descr (&loc_descr, tmp);
19294
19295 /* Calculate the address of the offset. */
19296 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19297 gcc_assert (offset < 0);
19298
19299 tmp = int_loc_descriptor (-offset);
19300 add_loc_descr (&loc_descr, tmp);
19301 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19302 add_loc_descr (&loc_descr, tmp);
19303
19304 /* Extract the offset. */
19305 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19306 add_loc_descr (&loc_descr, tmp);
19307
19308 /* Add it to the object address. */
19309 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19310 add_loc_descr (&loc_descr, tmp);
19311 }
19312 else
19313 offset = tree_to_shwi (BINFO_OFFSET (decl));
19314 }
19315 else
19316 {
19317 loc_descr = field_byte_offset (decl, ctx, &offset);
19318
19319 /* If loc_descr is available then we know the field offset is dynamic.
19320 However, GDB does not handle dynamic field offsets very well at the
19321 moment. */
19322 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19323 {
19324 loc_descr = NULL;
19325 offset = 0;
19326 }
19327
19328 /* Data member location evaluation starts with the base address on the
19329 stack. Compute the field offset and add it to this base address. */
19330 else if (loc_descr != NULL)
19331 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19332 }
19333
19334 if (! loc_descr)
19335 {
19336 /* Although DW_AT_data_bit_offset was already added in DWARF4,
19337 GDB, for example, only added support for it in November 2016.
19338 For DWARF5 we need newer debug info consumers anyway. We might
19339 change this to dwarf_version >= 4 once most consumers have caught up. */
19340 if (dwarf_version >= 5
19341 && TREE_CODE (decl) == FIELD_DECL
19342 && DECL_BIT_FIELD_TYPE (decl))
19343 {
19344 tree off = bit_position (decl);
19345 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19346 {
19347 remove_AT (die, DW_AT_byte_size);
19348 remove_AT (die, DW_AT_bit_offset);
19349 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19350 return;
19351 }
19352 }
19353 if (dwarf_version > 2)
19354 {
19355 /* Don't need to output a location expression, just the constant. */
19356 if (offset < 0)
19357 add_AT_int (die, DW_AT_data_member_location, offset);
19358 else
19359 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19360 return;
19361 }
19362 else
19363 {
19364 enum dwarf_location_atom op;
19365
19366 /* The DWARF2 standard says that we should assume that the structure
19367 address is already on the stack, so we can specify a structure
19368 field address by using DW_OP_plus_uconst. */
19369 op = DW_OP_plus_uconst;
19370 loc_descr = new_loc_descr (op, offset, 0);
19371 }
19372 }
19373
19374 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19375 }
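/* A minimal consumer-side sketch of how the virtual-base expression
   built above (BaseAddr = ObAddr + *((*ObAddr) - Offset)) would be
   evaluated.  This is illustrative only; read_word () is a hypothetical
   stand-in for the debugger's target-memory accessor and is not part of
   GCC:

     uintptr_t
     eval_vbase_location (uintptr_t obj_addr, uintptr_t vcall_offset)
     {
       uintptr_t vptr = read_word (obj_addr);         /* DW_OP_dup; DW_OP_deref */
       uintptr_t slot = vptr - vcall_offset;          /* push -offset; DW_OP_minus */
       intptr_t  disp = (intptr_t) read_word (slot);  /* DW_OP_deref */
       return obj_addr + disp;                        /* DW_OP_plus */
     }

   where vcall_offset is the (positive) value pushed by
   int_loc_descriptor (-offset) above.  */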
19376
19377 /* Writes integer values to dw_vec_const array. */
19378
19379 static void
19380 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19381 {
19382 while (size != 0)
19383 {
19384 *dest++ = val & 0xff;
19385 val >>= 8;
19386 --size;
19387 }
19388 }
19389
19390 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19391
19392 static HOST_WIDE_INT
19393 extract_int (const unsigned char *src, unsigned int size)
19394 {
19395 HOST_WIDE_INT val = 0;
19396
19397 src += size;
19398 while (size != 0)
19399 {
19400 val <<= 8;
19401 val |= *--src & 0xff;
19402 --size;
19403 }
19404 return val;
19405 }
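/* For instance, insert_int (0x1234, 2, dest) stores the bytes
   { 0x34, 0x12 } (least significant byte first, regardless of host
   endianness), and extract_int (dest, 2) reconstructs 0x1234 from that
   same buffer by walking the bytes in the opposite order.  */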
19406
19407 /* Writes wide_int values to dw_vec_const array. */
19408
19409 static void
19410 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19411 {
19412 int i;
19413
19414 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19415 {
19416 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19417 return;
19418 }
19419
19420 /* We'd have to extend this code to support odd sizes. */
19421 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19422
19423 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19424
19425 if (WORDS_BIG_ENDIAN)
19426 for (i = n - 1; i >= 0; i--)
19427 {
19428 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19429 dest += sizeof (HOST_WIDE_INT);
19430 }
19431 else
19432 for (i = 0; i < n; i++)
19433 {
19434 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19435 dest += sizeof (HOST_WIDE_INT);
19436 }
19437 }
19438
19439 /* Writes floating point values to dw_vec_const array. */
19440
19441 static void
19442 insert_float (const_rtx rtl, unsigned char *array)
19443 {
19444 long val[4];
19445 int i;
19446 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19447
19448 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19449
19450 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19451 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19452 {
19453 insert_int (val[i], 4, array);
19454 array += 4;
19455 }
19456 }
19457
19458 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19459 does not have a "location" either in memory or in a register. These
19460 things can arise in GNU C when a constant is passed as an actual parameter
19461 to an inlined function. They can also arise in C++ where declared
19462 constants do not necessarily get memory "homes". */
19463
19464 static bool
19465 add_const_value_attribute (dw_die_ref die, rtx rtl)
19466 {
19467 switch (GET_CODE (rtl))
19468 {
19469 case CONST_INT:
19470 {
19471 HOST_WIDE_INT val = INTVAL (rtl);
19472
19473 if (val < 0)
19474 add_AT_int (die, DW_AT_const_value, val);
19475 else
19476 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19477 }
19478 return true;
19479
19480 case CONST_WIDE_INT:
19481 {
19482 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19483 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19484 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19485 wide_int w = wi::zext (w1, prec);
19486 add_AT_wide (die, DW_AT_const_value, w);
19487 }
19488 return true;
19489
19490 case CONST_DOUBLE:
19491 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19492 floating-point constant. A CONST_DOUBLE is used whenever the
19493 constant requires more than one word in order to be adequately
19494 represented. */
19495 if (TARGET_SUPPORTS_WIDE_INT == 0
19496 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19497 add_AT_double (die, DW_AT_const_value,
19498 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19499 else
19500 {
19501 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19502 unsigned int length = GET_MODE_SIZE (mode);
19503 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19504
19505 insert_float (rtl, array);
19506 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19507 }
19508 return true;
19509
19510 case CONST_VECTOR:
19511 {
19512 unsigned int length;
19513 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19514 return false;
19515
19516 machine_mode mode = GET_MODE (rtl);
19517 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19518 unsigned char *array
19519 = ggc_vec_alloc<unsigned char> (length * elt_size);
19520 unsigned int i;
19521 unsigned char *p;
19522 machine_mode imode = GET_MODE_INNER (mode);
19523
19524 switch (GET_MODE_CLASS (mode))
19525 {
19526 case MODE_VECTOR_INT:
19527 for (i = 0, p = array; i < length; i++, p += elt_size)
19528 {
19529 rtx elt = CONST_VECTOR_ELT (rtl, i);
19530 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19531 }
19532 break;
19533
19534 case MODE_VECTOR_FLOAT:
19535 for (i = 0, p = array; i < length; i++, p += elt_size)
19536 {
19537 rtx elt = CONST_VECTOR_ELT (rtl, i);
19538 insert_float (elt, p);
19539 }
19540 break;
19541
19542 default:
19543 gcc_unreachable ();
19544 }
19545
19546 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19547 }
19548 return true;
19549
19550 case CONST_STRING:
19551 if (dwarf_version >= 4 || !dwarf_strict)
19552 {
19553 dw_loc_descr_ref loc_result;
19554 resolve_one_addr (&rtl);
19555 rtl_addr:
19556 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19557 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19558 add_AT_loc (die, DW_AT_location, loc_result);
19559 vec_safe_push (used_rtx_array, rtl);
19560 return true;
19561 }
19562 return false;
19563
19564 case CONST:
19565 if (CONSTANT_P (XEXP (rtl, 0)))
19566 return add_const_value_attribute (die, XEXP (rtl, 0));
19567 /* FALLTHROUGH */
19568 case SYMBOL_REF:
19569 if (!const_ok_for_output (rtl))
19570 return false;
19571 /* FALLTHROUGH */
19572 case LABEL_REF:
19573 if (dwarf_version >= 4 || !dwarf_strict)
19574 goto rtl_addr;
19575 return false;
19576
19577 case PLUS:
19578 /* In cases where an inlined instance of an inline function is passed
19579 the address of an `auto' variable (which is local to the caller) we
19580 can get a situation where the DECL_RTL of the artificial local
19581 variable (for the inlining) which acts as a stand-in for the
19582 corresponding formal parameter (of the inline function) will look
19583 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19584 exactly a compile-time constant expression, but it isn't the address
19585 of the (artificial) local variable either. Rather, it represents the
19586 *value* which the artificial local variable always has during its
19587 lifetime. We currently have no way to represent such quasi-constant
19588 values in Dwarf, so for now we just punt and generate nothing. */
19589 return false;
19590
19591 case HIGH:
19592 case CONST_FIXED:
19593 return false;
19594
19595 case MEM:
19596 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19597 && MEM_READONLY_P (rtl)
19598 && GET_MODE (rtl) == BLKmode)
19599 {
19600 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19601 return true;
19602 }
19603 return false;
19604
19605 default:
19606 /* No other kinds of rtx should be possible here. */
19607 gcc_unreachable ();
19608 }
19609 return false;
19610 }
19611
19612 /* Determine whether the evaluation of EXPR references any variables
19613 or functions which aren't otherwise used (and therefore may not be
19614 output). */
19615 static tree
19616 reference_to_unused (tree * tp, int * walk_subtrees,
19617 void * data ATTRIBUTE_UNUSED)
19618 {
19619 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19620 *walk_subtrees = 0;
19621
19622 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19623 && ! TREE_ASM_WRITTEN (*tp))
19624 return *tp;
19625 /* ??? The C++ FE emits debug information for using decls, so
19626 putting gcc_unreachable here falls over. See PR31899. For now
19627 be conservative. */
19628 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19629 return *tp;
19630 else if (VAR_P (*tp))
19631 {
19632 varpool_node *node = varpool_node::get (*tp);
19633 if (!node || !node->definition)
19634 return *tp;
19635 }
19636 else if (TREE_CODE (*tp) == FUNCTION_DECL
19637 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19638 {
19639 /* The call graph machinery must have finished analyzing,
19640 optimizing and gimplifying the CU by now.
19641 So if *TP has no call graph node associated
19642 to it, it means *TP will not be emitted. */
19643 if (!cgraph_node::get (*tp))
19644 return *tp;
19645 }
19646 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19647 return *tp;
19648
19649 return NULL_TREE;
19650 }
19651
19652 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19653 for use in a later add_const_value_attribute call. */
19654
19655 static rtx
19656 rtl_for_decl_init (tree init, tree type)
19657 {
19658 rtx rtl = NULL_RTX;
19659
19660 STRIP_NOPS (init);
19661
19662 /* If a variable is initialized with a string constant without embedded
19663 zeros, build CONST_STRING. */
19664 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19665 {
19666 tree enttype = TREE_TYPE (type);
19667 tree domain = TYPE_DOMAIN (type);
19668 scalar_int_mode mode;
19669
19670 if (is_int_mode (TYPE_MODE (enttype), &mode)
19671 && GET_MODE_SIZE (mode) == 1
19672 && domain
19673 && TYPE_MAX_VALUE (domain)
19674 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19675 && integer_zerop (TYPE_MIN_VALUE (domain))
19676 && compare_tree_int (TYPE_MAX_VALUE (domain),
19677 TREE_STRING_LENGTH (init) - 1) == 0
19678 && ((size_t) TREE_STRING_LENGTH (init)
19679 == strlen (TREE_STRING_POINTER (init)) + 1))
19680 {
19681 rtl = gen_rtx_CONST_STRING (VOIDmode,
19682 ggc_strdup (TREE_STRING_POINTER (init)));
19683 rtl = gen_rtx_MEM (BLKmode, rtl);
19684 MEM_READONLY_P (rtl) = 1;
19685 }
19686 }
19687 /* Other aggregates, and complex values, could be represented using
19688 CONCAT: FIXME! */
19689 else if (AGGREGATE_TYPE_P (type)
19690 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19691 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19692 || TREE_CODE (type) == COMPLEX_TYPE)
19693 ;
19694 /* Vectors only work if their mode is supported by the target.
19695 FIXME: generic vectors ought to work too. */
19696 else if (TREE_CODE (type) == VECTOR_TYPE
19697 && !VECTOR_MODE_P (TYPE_MODE (type)))
19698 ;
19699 /* If the initializer is something that we know will expand into an
19700 immediate RTL constant, expand it now. We must be careful not to
19701 reference variables which won't be output. */
19702 else if (initializer_constant_valid_p (init, type)
19703 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19704 {
19705 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19706 possible. */
19707 if (TREE_CODE (type) == VECTOR_TYPE)
19708 switch (TREE_CODE (init))
19709 {
19710 case VECTOR_CST:
19711 break;
19712 case CONSTRUCTOR:
19713 if (TREE_CONSTANT (init))
19714 {
19715 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19716 bool constant_p = true;
19717 tree value;
19718 unsigned HOST_WIDE_INT ix;
19719
19720 /* Even when ctor is constant, it might contain non-*_CST
19721 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19722 belong in VECTOR_CST nodes. */
19723 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19724 if (!CONSTANT_CLASS_P (value))
19725 {
19726 constant_p = false;
19727 break;
19728 }
19729
19730 if (constant_p)
19731 {
19732 init = build_vector_from_ctor (type, elts);
19733 break;
19734 }
19735 }
19736 /* FALLTHRU */
19737
19738 default:
19739 return NULL;
19740 }
19741
19742 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19743
19744 /* If expand_expr returns a MEM, it wasn't immediate. */
19745 gcc_assert (!rtl || !MEM_P (rtl));
19746 }
19747
19748 return rtl;
19749 }
19750
19751 /* Generate RTL for the variable DECL to represent its location. */
19752
19753 static rtx
19754 rtl_for_decl_location (tree decl)
19755 {
19756 rtx rtl;
19757
19758 /* Here we have to decide where we are going to say the parameter "lives"
19759 (as far as the debugger is concerned). We only have a couple of
19760 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19761
19762 DECL_RTL normally indicates where the parameter lives during most of the
19763 activation of the function. If optimization is enabled however, this
19764 could be either NULL or else a pseudo-reg. Both of those cases indicate
19765 that the parameter doesn't really live anywhere (as far as the code
19766 generation parts of GCC are concerned) during most of the function's
19767 activation. That will happen (for example) if the parameter is never
19768 referenced within the function.
19769
19770 We could just generate a location descriptor here for all non-NULL
19771 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19772 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19773 where DECL_RTL is NULL or is a pseudo-reg.
19774
19775 Note however that we can only get away with using DECL_INCOMING_RTL as
19776 a backup substitute for DECL_RTL in certain limited cases. In cases
19777 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19778 we can be sure that the parameter was passed using the same type as it is
19779 declared to have within the function, and that its DECL_INCOMING_RTL
19780 points us to a place where a value of that type is passed.
19781
19782 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19783 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19784 because in these cases DECL_INCOMING_RTL points us to a value of some
19785 type which is *different* from the type of the parameter itself. Thus,
19786 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19787 such cases, the debugger would end up (for example) trying to fetch a
19788 `float' from a place which actually contains the first part of a
19789 `double'. That would lead to really incorrect and confusing
19790 output at debug-time.
19791
19792 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19793 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19794 are a couple of exceptions however. On little-endian machines we can
19795 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19796 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19797 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19798 when (on a little-endian machine) a non-prototyped function has a
19799 parameter declared to be of type `short' or `char'. In such cases,
19800 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19801 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19802 passed `int' value. If the debugger then uses that address to fetch
19803 a `short' or a `char' (on a little-endian machine) the result will be
19804 the correct data, so we allow for such exceptional cases below.
19805
19806 Note that our goal here is to describe the place where the given formal
19807 parameter lives during most of the function's activation (i.e. between the
19808 end of the prologue and the start of the epilogue). We'll do that as best
19809 as we can. Note however that if the given formal parameter is modified
19810 sometime during the execution of the function, then a stack backtrace (at
19811 debug-time) will show the function as having been called with the *new*
19812 value rather than the value which was originally passed in. This happens
19813 rarely enough that it is not a major problem, but it *is* a problem, and
19814 I'd like to fix it.
19815
19816 A future version of dwarf2out.c may generate two additional attributes for
19817 any given DW_TAG_formal_parameter DIE which will describe the "passed
19818 type" and the "passed location" for the given formal parameter in addition
19819 to the attributes we now generate to indicate the "declared type" and the
19820 "active location" for each parameter. This additional set of attributes
19821 could be used by debuggers for stack backtraces. Separately, note that
19822 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19823 This happens (for example) for inlined-instances of inline function formal
19824 parameters which are never referenced. This really shouldn't be
19825 happening. All PARM_DECL nodes should get valid non-NULL
19826 DECL_INCOMING_RTL values. FIXME. */
19827
19828 /* Use DECL_RTL as the "location" unless we find something better. */
19829 rtl = DECL_RTL_IF_SET (decl);
19830
19831 /* When generating abstract instances, ignore everything except
19832 constants, symbols living in memory, and symbols living in
19833 fixed registers. */
19834 if (! reload_completed)
19835 {
19836 if (rtl
19837 && (CONSTANT_P (rtl)
19838 || (MEM_P (rtl)
19839 && CONSTANT_P (XEXP (rtl, 0)))
19840 || (REG_P (rtl)
19841 && VAR_P (decl)
19842 && TREE_STATIC (decl))))
19843 {
19844 rtl = targetm.delegitimize_address (rtl);
19845 return rtl;
19846 }
19847 rtl = NULL_RTX;
19848 }
19849 else if (TREE_CODE (decl) == PARM_DECL)
19850 {
19851 if (rtl == NULL_RTX
19852 || is_pseudo_reg (rtl)
19853 || (MEM_P (rtl)
19854 && is_pseudo_reg (XEXP (rtl, 0))
19855 && DECL_INCOMING_RTL (decl)
19856 && MEM_P (DECL_INCOMING_RTL (decl))
19857 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19858 {
19859 tree declared_type = TREE_TYPE (decl);
19860 tree passed_type = DECL_ARG_TYPE (decl);
19861 machine_mode dmode = TYPE_MODE (declared_type);
19862 machine_mode pmode = TYPE_MODE (passed_type);
19863
19864 /* This decl represents a formal parameter which was optimized out.
19865 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19866 all cases where (rtl == NULL_RTX) just below. */
19867 if (dmode == pmode)
19868 rtl = DECL_INCOMING_RTL (decl);
19869 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19870 && SCALAR_INT_MODE_P (dmode)
19871 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19872 && DECL_INCOMING_RTL (decl))
19873 {
19874 rtx inc = DECL_INCOMING_RTL (decl);
19875 if (REG_P (inc))
19876 rtl = inc;
19877 else if (MEM_P (inc))
19878 {
19879 if (BYTES_BIG_ENDIAN)
19880 rtl = adjust_address_nv (inc, dmode,
19881 GET_MODE_SIZE (pmode)
19882 - GET_MODE_SIZE (dmode));
19883 else
19884 rtl = inc;
19885 }
19886 }
19887 }
19888
19889 /* If the parm was passed in registers, but lives on the stack, then
19890 make a big endian correction if the mode of the type of the
19891 parameter is not the same as the mode of the rtl. */
19892 /* ??? This is the same series of checks that are made in dbxout.c before
19893 we reach the big endian correction code there. It isn't clear if all
19894 of these checks are necessary here, but keeping them all is the safe
19895 thing to do. */
19896 else if (MEM_P (rtl)
19897 && XEXP (rtl, 0) != const0_rtx
19898 && ! CONSTANT_P (XEXP (rtl, 0))
19899 /* Not passed in memory. */
19900 && !MEM_P (DECL_INCOMING_RTL (decl))
19901 /* Not passed by invisible reference. */
19902 && (!REG_P (XEXP (rtl, 0))
19903 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19904 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19905 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19906 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19907 #endif
19908 )
19909 /* Big endian correction check. */
19910 && BYTES_BIG_ENDIAN
19911 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19912 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19913 UNITS_PER_WORD))
19914 {
19915 machine_mode addr_mode = get_address_mode (rtl);
19916 poly_int64 offset = (UNITS_PER_WORD
19917 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19918
19919 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19920 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19921 }
19922 }
19923 else if (VAR_P (decl)
19924 && rtl
19925 && MEM_P (rtl)
19926 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19927 {
19928 machine_mode addr_mode = get_address_mode (rtl);
19929 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19930 GET_MODE (rtl));
19931
19932 /* If a variable is declared "register" yet is smaller than
19933 a register, then if we store the variable to memory, it
19934 looks like we're storing a register-sized value, when in
19935 fact we are not. We need to adjust the offset of the
19936 storage location to reflect the actual value's bytes,
19937 else gdb will not be able to display it. */
19938 if (maybe_ne (offset, 0))
19939 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19940 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19941 }
19942
19943 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19944 and will have been substituted directly into all expressions that use it.
19945 C does not have such a concept, but C++ and other languages do. */
19946 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19947 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19948
19949 if (rtl)
19950 rtl = targetm.delegitimize_address (rtl);
19951
19952 /* If we don't look past the constant pool, we risk emitting a
19953 reference to a constant pool entry that isn't referenced from
19954 code, and thus is not emitted. */
19955 if (rtl)
19956 rtl = avoid_constant_pool_reference (rtl);
19957
19958 /* Try harder to get an rtl. If this symbol ends up not being emitted
19959 in the current CU, resolve_addr will remove the expression referencing
19960 it. */
19961 if (rtl == NULL_RTX
19962 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19963 && VAR_P (decl)
19964 && !DECL_EXTERNAL (decl)
19965 && TREE_STATIC (decl)
19966 && DECL_NAME (decl)
19967 && !DECL_HARD_REGISTER (decl)
19968 && DECL_MODE (decl) != VOIDmode)
19969 {
19970 rtl = make_decl_rtl_for_debug (decl);
19971 if (!MEM_P (rtl)
19972 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19973 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19974 rtl = NULL_RTX;
19975 }
19976
19977 return rtl;
19978 }
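/* To make the promoted-parameter handling above concrete, assume a
   big-endian target where a non-prototyped `char' parameter is passed
   as a 4-byte `int' that lands in memory (hypothetical sizes).  Then
   dmode is a 1-byte mode, pmode a 4-byte mode, and the adjust_address_nv
   call above offsets DECL_INCOMING_RTL by 4 - 1 = 3 bytes so that the
   reported location covers the byte which actually holds the `char'
   value.  On a little-endian target no adjustment is needed and
   DECL_INCOMING_RTL is used as-is.  */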
19979
19980 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19981 returned. If so, the decl for the COMMON block is returned, and the
19982 value is the offset into the common block for the symbol. */
19983
19984 static tree
19985 fortran_common (tree decl, HOST_WIDE_INT *value)
19986 {
19987 tree val_expr, cvar;
19988 machine_mode mode;
19989 poly_int64 bitsize, bitpos;
19990 tree offset;
19991 HOST_WIDE_INT cbitpos;
19992 int unsignedp, reversep, volatilep = 0;
19993
19994 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19995 it does not have a value (the offset into the common area), or if it
19996 is thread local (as opposed to global) then it isn't common, and shouldn't
19997 be handled as such. */
19998 if (!VAR_P (decl)
19999 || !TREE_STATIC (decl)
20000 || !DECL_HAS_VALUE_EXPR_P (decl)
20001 || !is_fortran ())
20002 return NULL_TREE;
20003
20004 val_expr = DECL_VALUE_EXPR (decl);
20005 if (TREE_CODE (val_expr) != COMPONENT_REF)
20006 return NULL_TREE;
20007
20008 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
20009 &unsignedp, &reversep, &volatilep);
20010
20011 if (cvar == NULL_TREE
20012 || !VAR_P (cvar)
20013 || DECL_ARTIFICIAL (cvar)
20014 || !TREE_PUBLIC (cvar)
20015 /* We don't expect to have to cope with variable offsets,
20016 since at present all static data must have a constant size. */
20017 || !bitpos.is_constant (&cbitpos))
20018 return NULL_TREE;
20019
20020 *value = 0;
20021 if (offset != NULL)
20022 {
20023 if (!tree_fits_shwi_p (offset))
20024 return NULL_TREE;
20025 *value = tree_to_shwi (offset);
20026 }
20027 if (cbitpos != 0)
20028 *value += cbitpos / BITS_PER_UNIT;
20029
20030 return cvar;
20031 }
20032
20033 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20034 data attribute for a variable or a parameter. We generate the
20035 DW_AT_const_value attribute only in those cases where the given variable
20036 or parameter does not have a true "location" either in memory or in a
20037 register. This can happen (for example) when a constant is passed as an
20038 actual argument in a call to an inline function. (It's possible that
20039 these things can crop up in other ways also.) Note that one type of
20040 constant value which can be passed into an inlined function is a constant
20041 pointer. This can happen for example if an actual argument in an inlined
20042 function call evaluates to a compile-time constant address.
20043
20044 CACHE_P is true if it is worth caching the location list for DECL,
20045 so that future calls can reuse it rather than regenerate it from scratch.
20046 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20047 since we will need to refer to them each time the function is inlined. */
20048
20049 static bool
20050 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20051 {
20052 rtx rtl;
20053 dw_loc_list_ref list;
20054 var_loc_list *loc_list;
20055 cached_dw_loc_list *cache;
20056
20057 if (early_dwarf)
20058 return false;
20059
20060 if (TREE_CODE (decl) == ERROR_MARK)
20061 return false;
20062
20063 if (get_AT (die, DW_AT_location)
20064 || get_AT (die, DW_AT_const_value))
20065 return true;
20066
20067 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20068 || TREE_CODE (decl) == RESULT_DECL);
20069
20070 /* Try to get some constant RTL for this decl, and use that as the value of
20071 the location. */
20072
20073 rtl = rtl_for_decl_location (decl);
20074 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20075 && add_const_value_attribute (die, rtl))
20076 return true;
20077
20078 /* See if we have a single-element location list that is equivalent to
20079 a constant value. In that case it is better to use add_const_value_attribute
20080 rather than expanding the constant value's equivalent. */
20081 loc_list = lookup_decl_loc (decl);
20082 if (loc_list
20083 && loc_list->first
20084 && loc_list->first->next == NULL
20085 && NOTE_P (loc_list->first->loc)
20086 && NOTE_VAR_LOCATION (loc_list->first->loc)
20087 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20088 {
20089 struct var_loc_node *node;
20090
20091 node = loc_list->first;
20092 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20093 if (GET_CODE (rtl) == EXPR_LIST)
20094 rtl = XEXP (rtl, 0);
20095 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20096 && add_const_value_attribute (die, rtl))
20097 return true;
20098 }
20099 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20100 list several times. See if we've already cached the contents. */
20101 list = NULL;
20102 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20103 cache_p = false;
20104 if (cache_p)
20105 {
20106 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20107 if (cache)
20108 list = cache->loc_list;
20109 }
20110 if (list == NULL)
20111 {
20112 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20113 NULL);
20114 /* It is usually worth caching this result if the decl is from
20115 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20116 if (cache_p && list && list->dw_loc_next)
20117 {
20118 cached_dw_loc_list **slot
20119 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20120 DECL_UID (decl),
20121 INSERT);
20122 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20123 cache->decl_id = DECL_UID (decl);
20124 cache->loc_list = list;
20125 *slot = cache;
20126 }
20127 }
20128 if (list)
20129 {
20130 add_AT_location_description (die, DW_AT_location, list);
20131 return true;
20132 }
20133 /* None of that worked, so it must not really have a location;
20134 try adding a constant value attribute from the DECL_INITIAL. */
20135 return tree_add_const_value_attribute_for_decl (die, decl);
20136 }
20137
20138 /* Helper function for tree_add_const_value_attribute. Natively encode
20139 initializer INIT into an array. Return true if successful. */
20140
20141 static bool
20142 native_encode_initializer (tree init, unsigned char *array, int size)
20143 {
20144 tree type;
20145
20146 if (init == NULL_TREE)
20147 return false;
20148
20149 STRIP_NOPS (init);
20150 switch (TREE_CODE (init))
20151 {
20152 case STRING_CST:
20153 type = TREE_TYPE (init);
20154 if (TREE_CODE (type) == ARRAY_TYPE)
20155 {
20156 tree enttype = TREE_TYPE (type);
20157 scalar_int_mode mode;
20158
20159 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20160 || GET_MODE_SIZE (mode) != 1)
20161 return false;
20162 if (int_size_in_bytes (type) != size)
20163 return false;
20164 if (size > TREE_STRING_LENGTH (init))
20165 {
20166 memcpy (array, TREE_STRING_POINTER (init),
20167 TREE_STRING_LENGTH (init));
20168 memset (array + TREE_STRING_LENGTH (init),
20169 '\0', size - TREE_STRING_LENGTH (init));
20170 }
20171 else
20172 memcpy (array, TREE_STRING_POINTER (init), size);
20173 return true;
20174 }
20175 return false;
20176 case CONSTRUCTOR:
20177 type = TREE_TYPE (init);
20178 if (int_size_in_bytes (type) != size)
20179 return false;
20180 if (TREE_CODE (type) == ARRAY_TYPE)
20181 {
20182 HOST_WIDE_INT min_index;
20183 unsigned HOST_WIDE_INT cnt;
20184 int curpos = 0, fieldsize;
20185 constructor_elt *ce;
20186
20187 if (TYPE_DOMAIN (type) == NULL_TREE
20188 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20189 return false;
20190
20191 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20192 if (fieldsize <= 0)
20193 return false;
20194
20195 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20196 memset (array, '\0', size);
20197 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20198 {
20199 tree val = ce->value;
20200 tree index = ce->index;
20201 int pos = curpos;
20202 if (index && TREE_CODE (index) == RANGE_EXPR)
20203 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20204 * fieldsize;
20205 else if (index)
20206 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20207
20208 if (val)
20209 {
20210 STRIP_NOPS (val);
20211 if (!native_encode_initializer (val, array + pos, fieldsize))
20212 return false;
20213 }
20214 curpos = pos + fieldsize;
20215 if (index && TREE_CODE (index) == RANGE_EXPR)
20216 {
20217 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20218 - tree_to_shwi (TREE_OPERAND (index, 0));
20219 while (count-- > 0)
20220 {
20221 if (val)
20222 memcpy (array + curpos, array + pos, fieldsize);
20223 curpos += fieldsize;
20224 }
20225 }
20226 gcc_assert (curpos <= size);
20227 }
20228 return true;
20229 }
20230 else if (TREE_CODE (type) == RECORD_TYPE
20231 || TREE_CODE (type) == UNION_TYPE)
20232 {
20233 tree field = NULL_TREE;
20234 unsigned HOST_WIDE_INT cnt;
20235 constructor_elt *ce;
20236
20237 if (int_size_in_bytes (type) != size)
20238 return false;
20239
20240 if (TREE_CODE (type) == RECORD_TYPE)
20241 field = TYPE_FIELDS (type);
20242
20243 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20244 {
20245 tree val = ce->value;
20246 int pos, fieldsize;
20247
20248 if (ce->index != 0)
20249 field = ce->index;
20250
20251 if (val)
20252 STRIP_NOPS (val);
20253
20254 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20255 return false;
20256
20257 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20258 && TYPE_DOMAIN (TREE_TYPE (field))
20259 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20260 return false;
20261 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20262 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20263 return false;
20264 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20265 pos = int_byte_position (field);
20266 gcc_assert (pos + fieldsize <= size);
20267 if (val && fieldsize != 0
20268 && !native_encode_initializer (val, array + pos, fieldsize))
20269 return false;
20270 }
20271 return true;
20272 }
20273 return false;
20274 case VIEW_CONVERT_EXPR:
20275 case NON_LVALUE_EXPR:
20276 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20277 default:
20278 return native_encode_expr (init, array, size) == size;
20279 }
20280 }
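/* As an illustration, assuming 8-bit bytes and a front end that keeps
   the GNU range designator as a RANGE_EXPR, an initializer such as

     static const unsigned char tab[6] = { [0 ... 3] = 7, [4] = 1 };

   is encoded by the CONSTRUCTOR case above into the byte array
   { 7, 7, 7, 7, 1, 0 }: the RANGE_EXPR index replicates the value 7
   into positions 0..3, position 4 receives 1, and the initial memset
   leaves the remaining tail element zeroed.  */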
20281
20282 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20283 attribute is the const value T. */
20284
20285 static bool
20286 tree_add_const_value_attribute (dw_die_ref die, tree t)
20287 {
20288 tree init;
20289 tree type = TREE_TYPE (t);
20290 rtx rtl;
20291
20292 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20293 return false;
20294
20295 init = t;
20296 gcc_assert (!DECL_P (init));
20297
20298 if (TREE_CODE (init) == INTEGER_CST)
20299 {
20300 if (tree_fits_uhwi_p (init))
20301 {
20302 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20303 return true;
20304 }
20305 if (tree_fits_shwi_p (init))
20306 {
20307 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20308 return true;
20309 }
20310 }
20311 if (! early_dwarf)
20312 {
20313 rtl = rtl_for_decl_init (init, type);
20314 if (rtl)
20315 return add_const_value_attribute (die, rtl);
20316 }
20317 /* If the host and target are sane, try harder. */
20318 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20319 && initializer_constant_valid_p (init, type))
20320 {
20321 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20322 if (size > 0 && (int) size == size)
20323 {
20324 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20325
20326 if (native_encode_initializer (init, array, size))
20327 {
20328 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20329 return true;
20330 }
20331 ggc_free (array);
20332 }
20333 }
20334 return false;
20335 }
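/* For instance, a C++ class-scope constant such as
   `static const int answer = 42;' whose storage is optimized away
   typically reaches this point with an INTEGER_CST initializer, and
   the code above then emits DW_AT_const_value 42 directly through
   add_AT_unsigned without going through the RTL or byte-encoding
   fallbacks.  */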
20336
20337 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20338 attribute is the const value of T, where T is an integral constant
20339 variable with static storage duration
20340 (so it can't be a PARM_DECL or a RESULT_DECL). */
20341
20342 static bool
20343 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20344 {
20345
20346 if (!decl
20347 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20348 || (VAR_P (decl) && !TREE_STATIC (decl)))
20349 return false;
20350
20351 if (TREE_READONLY (decl)
20352 && ! TREE_THIS_VOLATILE (decl)
20353 && DECL_INITIAL (decl))
20354 /* OK */;
20355 else
20356 return false;
20357
20358 /* Don't add DW_AT_const_value if abstract origin already has one. */
20359 if (get_AT (var_die, DW_AT_const_value))
20360 return false;
20361
20362 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20363 }
20364
20365 /* Convert the CFI instructions for the current function into a
20366 location list. This is used for DW_AT_frame_base when we are targeting
20367 a dwarf2 consumer that does not support the dwarf3
20368 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20369 expressions. */
20370
20371 static dw_loc_list_ref
20372 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20373 {
20374 int ix;
20375 dw_fde_ref fde;
20376 dw_loc_list_ref list, *list_tail;
20377 dw_cfi_ref cfi;
20378 dw_cfa_location last_cfa, next_cfa;
20379 const char *start_label, *last_label, *section;
20380 dw_cfa_location remember;
20381
20382 fde = cfun->fde;
20383 gcc_assert (fde != NULL);
20384
20385 section = secname_for_decl (current_function_decl);
20386 list_tail = &list;
20387 list = NULL;
20388
20389 memset (&next_cfa, 0, sizeof (next_cfa));
20390 next_cfa.reg = INVALID_REGNUM;
20391 remember = next_cfa;
20392
20393 start_label = fde->dw_fde_begin;
20394
20395 /* ??? Bald assumption that the CIE opcode list does not contain
20396 advance opcodes. */
20397 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20398 lookup_cfa_1 (cfi, &next_cfa, &remember);
20399
20400 last_cfa = next_cfa;
20401 last_label = start_label;
20402
20403 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20404 {
20405 /* If the first partition contained no CFI adjustments, the
20406 CIE opcodes apply to the whole first partition. */
20407 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20408 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20409 list_tail = &(*list_tail)->dw_loc_next;
20410 start_label = last_label = fde->dw_fde_second_begin;
20411 }
20412
20413 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20414 {
20415 switch (cfi->dw_cfi_opc)
20416 {
20417 case DW_CFA_set_loc:
20418 case DW_CFA_advance_loc1:
20419 case DW_CFA_advance_loc2:
20420 case DW_CFA_advance_loc4:
20421 if (!cfa_equal_p (&last_cfa, &next_cfa))
20422 {
20423 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20424 start_label, 0, last_label, 0, section);
20425
20426 list_tail = &(*list_tail)->dw_loc_next;
20427 last_cfa = next_cfa;
20428 start_label = last_label;
20429 }
20430 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20431 break;
20432
20433 case DW_CFA_advance_loc:
20434 /* The encoding is complex enough that we should never emit this. */
20435 gcc_unreachable ();
20436
20437 default:
20438 lookup_cfa_1 (cfi, &next_cfa, &remember);
20439 break;
20440 }
20441 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20442 {
20443 if (!cfa_equal_p (&last_cfa, &next_cfa))
20444 {
20445 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20446 start_label, 0, last_label, 0, section);
20447
20448 list_tail = &(*list_tail)->dw_loc_next;
20449 last_cfa = next_cfa;
20450 start_label = last_label;
20451 }
20452 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20453 start_label, 0, fde->dw_fde_end, 0, section);
20454 list_tail = &(*list_tail)->dw_loc_next;
20455 start_label = last_label = fde->dw_fde_second_begin;
20456 }
20457 }
20458
20459 if (!cfa_equal_p (&last_cfa, &next_cfa))
20460 {
20461 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20462 start_label, 0, last_label, 0, section);
20463 list_tail = &(*list_tail)->dw_loc_next;
20464 start_label = last_label;
20465 }
20466
20467 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20468 start_label, 0,
20469 fde->dw_fde_second_begin
20470 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20471 section);
20472
20473 maybe_gen_llsym (list);
20474
20475 return list;
20476 }
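/* As a rough illustration (hypothetical function, label names invented
   for readability): if the CFA is sp + 4 on entry and becomes fp + 8
   once the prologue has set up the frame pointer, the loop above emits
   one location-list entry covering [function start, end of prologue)
   based on sp + 4 and a second entry covering the rest of the function
   based on fp + 8, with OFFSET folded into each expression by
   build_cfa_loc.  */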
20477
20478 /* Compute a displacement from the "steady-state frame pointer" to the
20479 frame base (often the same as the CFA), and store it in
20480 frame_pointer_fb_offset. OFFSET is added to the displacement
20481 before the latter is negated. */
20482
20483 static void
20484 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20485 {
20486 rtx reg, elim;
20487
20488 #ifdef FRAME_POINTER_CFA_OFFSET
20489 reg = frame_pointer_rtx;
20490 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20491 #else
20492 reg = arg_pointer_rtx;
20493 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20494 #endif
20495
20496 elim = (ira_use_lra_p
20497 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20498 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20499 elim = strip_offset_and_add (elim, &offset);
20500
20501 frame_pointer_fb_offset = -offset;
20502
20503 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20504 in which to eliminate. This is because its stack pointer isn't
20505 directly accessible as a register within the ISA. To work around
20506 this, assume that while we cannot provide a proper value for
20507 frame_pointer_fb_offset, we won't need one either. */
20508 frame_pointer_fb_offset_valid
20509 = ((SUPPORTS_STACK_ALIGNMENT
20510 && (elim == hard_frame_pointer_rtx
20511 || elim == stack_pointer_rtx))
20512 || elim == (frame_pointer_needed
20513 ? hard_frame_pointer_rtx
20514 : stack_pointer_rtx));
20515 }
20516
20517 /* Generate a DW_AT_name attribute given some string value to be included as
20518 the value of the attribute. */
20519
20520 static void
20521 add_name_attribute (dw_die_ref die, const char *name_string)
20522 {
20523 if (name_string != NULL && *name_string != 0)
20524 {
20525 if (demangle_name_func)
20526 name_string = (*demangle_name_func) (name_string);
20527
20528 add_AT_string (die, DW_AT_name, name_string);
20529 }
20530 }
20531
20532 /* Retrieve the descriptive type of TYPE, if any; make sure it has a
20533 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20534 of TYPE accordingly.
20535
20536 ??? This is a temporary measure until after we're able to generate
20537 regular DWARF for the complex Ada type system. */
20538
20539 static void
20540 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20541 dw_die_ref context_die)
20542 {
20543 tree dtype;
20544 dw_die_ref dtype_die;
20545
20546 if (!lang_hooks.types.descriptive_type)
20547 return;
20548
20549 dtype = lang_hooks.types.descriptive_type (type);
20550 if (!dtype)
20551 return;
20552
20553 dtype_die = lookup_type_die (dtype);
20554 if (!dtype_die)
20555 {
20556 gen_type_die (dtype, context_die);
20557 dtype_die = lookup_type_die (dtype);
20558 gcc_assert (dtype_die);
20559 }
20560
20561 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20562 }
20563
20564 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20565
20566 static const char *
20567 comp_dir_string (void)
20568 {
20569 const char *wd;
20570 char *wd1;
20571 static const char *cached_wd = NULL;
20572
20573 if (cached_wd != NULL)
20574 return cached_wd;
20575
20576 wd = get_src_pwd ();
20577 if (wd == NULL)
20578 return NULL;
20579
20580 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20581 {
20582 int wdlen;
20583
20584 wdlen = strlen (wd);
20585 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20586 strcpy (wd1, wd);
20587 wd1 [wdlen] = DIR_SEPARATOR;
20588 wd1 [wdlen + 1] = 0;
20589 wd = wd1;
20590 }
20591
20592 cached_wd = remap_debug_filename (wd);
20593 return cached_wd;
20594 }
20595
20596 /* Generate a DW_AT_comp_dir attribute for DIE. */
20597
20598 static void
20599 add_comp_dir_attribute (dw_die_ref die)
20600 {
20601 const char * wd = comp_dir_string ();
20602 if (wd != NULL)
20603 add_AT_string (die, DW_AT_comp_dir, wd);
20604 }
20605
20606 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20607 pointer computation, ...), output a representation for that bound according
20608 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20609 loc_list_from_tree for the meaning of CONTEXT. */
20610
20611 static void
20612 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20613 int forms, struct loc_descr_context *context)
20614 {
20615 dw_die_ref context_die, decl_die = NULL;
20616 dw_loc_list_ref list;
20617 bool strip_conversions = true;
20618 bool placeholder_seen = false;
20619
20620 while (strip_conversions)
20621 switch (TREE_CODE (value))
20622 {
20623 case ERROR_MARK:
20624 case SAVE_EXPR:
20625 return;
20626
20627 CASE_CONVERT:
20628 case VIEW_CONVERT_EXPR:
20629 value = TREE_OPERAND (value, 0);
20630 break;
20631
20632 default:
20633 strip_conversions = false;
20634 break;
20635 }
20636
20637 /* If possible and permitted, output the attribute as a constant. */
20638 if ((forms & dw_scalar_form_constant) != 0
20639 && TREE_CODE (value) == INTEGER_CST)
20640 {
20641 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20642
20643 /* If HOST_WIDE_INT is big enough then represent the bound as
20644 a constant value. We need to choose a form based on
20645 whether the type is signed or unsigned. We cannot just
20646 call add_AT_unsigned if the value itself is positive
20647 (add_AT_unsigned might add the unsigned value encoded as
20648 DW_FORM_data[1248]). Some DWARF consumers will lookup the
20649 bounds type and then sign extend any unsigned values found
20650 for signed types. This is needed only for
20651 DW_AT_{lower,upper}_bound, since for most other attributes,
20652 consumers will treat DW_FORM_data[1248] as unsigned values,
20653 regardless of the underlying type. */
20654 if (prec <= HOST_BITS_PER_WIDE_INT
20655 || tree_fits_uhwi_p (value))
20656 {
20657 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20658 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20659 else
20660 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20661 }
20662 else
20663 /* Otherwise represent the bound as an unsigned value with
20664 the precision of its type. The precision and signedness
20665 of the type will be necessary to re-interpret it
20666 unambiguously. */
20667 add_AT_wide (die, attr, wi::to_wide (value));
20668 return;
20669 }
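/* To make the subtlety above concrete: an upper bound of 200 whose type
   is a signed 32-bit integer must not be emitted via add_AT_unsigned,
   since that could end up as DW_FORM_data1 (0xc8) and a consumer which
   sign extends according to the bound's type would read it back as -56.
   Routing signed types through add_AT_int avoids this.  */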
20670
20671 /* Otherwise, if it's possible and permitted too, output a reference to
20672 another DIE. */
20673 if ((forms & dw_scalar_form_reference) != 0)
20674 {
20675 tree decl = NULL_TREE;
20676
20677 /* Some type attributes reference an outer type. For instance, the upper
20678 bound of an array may reference an embedding record (this happens in
20679 Ada). */
20680 if (TREE_CODE (value) == COMPONENT_REF
20681 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20682 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20683 decl = TREE_OPERAND (value, 1);
20684
20685 else if (VAR_P (value)
20686 || TREE_CODE (value) == PARM_DECL
20687 || TREE_CODE (value) == RESULT_DECL)
20688 decl = value;
20689
20690 if (decl != NULL_TREE)
20691 {
20692 decl_die = lookup_decl_die (decl);
20693
20694 /* ??? Can this happen, or should the variable have been bound
20695 first? Probably it can, since I imagine that we try to create
20696 the types of parameters in the order in which they exist in
20697 the list, and won't have created a forward reference to a
20698 later parameter. */
20699 if (decl_die != NULL)
20700 {
20701 if (get_AT (decl_die, DW_AT_location)
20702 || get_AT (decl_die, DW_AT_const_value))
20703 {
20704 add_AT_die_ref (die, attr, decl_die);
20705 return;
20706 }
20707 }
20708 }
20709 }
20710
20711 /* Last chance: try to create a stack operation procedure to evaluate the
20712 value. Do nothing if even that is not possible or permitted. */
20713 if ((forms & dw_scalar_form_exprloc) == 0)
20714 return;
20715
20716 list = loc_list_from_tree (value, 2, context);
20717 if (context && context->placeholder_arg)
20718 {
20719 placeholder_seen = context->placeholder_seen;
20720 context->placeholder_seen = false;
20721 }
20722 if (list == NULL || single_element_loc_list_p (list))
20723 {
20724 /* If this attribute is neither a reference nor a constant, it is
20725 a DWARF expression rather than a location description. For that,
20726 loc_list_from_tree (value, 0, &context) is needed. */
20727 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20728 if (list2 && single_element_loc_list_p (list2))
20729 {
20730 if (placeholder_seen)
20731 {
20732 struct dwarf_procedure_info dpi;
20733 dpi.fndecl = NULL_TREE;
20734 dpi.args_count = 1;
20735 if (!resolve_args_picking (list2->expr, 1, &dpi))
20736 return;
20737 }
20738 add_AT_loc (die, attr, list2->expr);
20739 return;
20740 }
20741 }
20742
20743 /* If that failed to give a single element location list, fall back to
20744 outputting this as a reference... still if permitted. */
20745 if (list == NULL
20746 || (forms & dw_scalar_form_reference) == 0
20747 || placeholder_seen)
20748 return;
20749
20750 if (!decl_die)
20751 {
20752 if (current_function_decl == 0)
20753 context_die = comp_unit_die ();
20754 else
20755 context_die = lookup_decl_die (current_function_decl);
20756
20757 decl_die = new_die (DW_TAG_variable, context_die, value);
20758 add_AT_flag (decl_die, DW_AT_artificial, 1);
20759 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20760 context_die);
20761 }
20762
20763 add_AT_location_description (decl_die, DW_AT_location, list);
20764 add_AT_die_ref (die, attr, decl_die);
20765 }
20766
20767 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20768 default. */
20769
20770 static int
20771 lower_bound_default (void)
20772 {
20773 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20774 {
20775 case DW_LANG_C:
20776 case DW_LANG_C89:
20777 case DW_LANG_C99:
20778 case DW_LANG_C11:
20779 case DW_LANG_C_plus_plus:
20780 case DW_LANG_C_plus_plus_11:
20781 case DW_LANG_C_plus_plus_14:
20782 case DW_LANG_ObjC:
20783 case DW_LANG_ObjC_plus_plus:
20784 return 0;
20785 case DW_LANG_Fortran77:
20786 case DW_LANG_Fortran90:
20787 case DW_LANG_Fortran95:
20788 case DW_LANG_Fortran03:
20789 case DW_LANG_Fortran08:
20790 return 1;
20791 case DW_LANG_UPC:
20792 case DW_LANG_D:
20793 case DW_LANG_Python:
20794 return dwarf_version >= 4 ? 0 : -1;
20795 case DW_LANG_Ada95:
20796 case DW_LANG_Ada83:
20797 case DW_LANG_Cobol74:
20798 case DW_LANG_Cobol85:
20799 case DW_LANG_Modula2:
20800 case DW_LANG_PLI:
20801 return dwarf_version >= 4 ? 1 : -1;
20802 default:
20803 return -1;
20804 }
20805 }
20806
20807 /* Given a tree node describing an array bound (either lower or upper), output
20808 a representation for that bound. */
20809
20810 static void
20811 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20812 tree bound, struct loc_descr_context *context)
20813 {
20814 int dflt;
20815
20816 while (1)
20817 switch (TREE_CODE (bound))
20818 {
20819 /* Strip all conversions. */
20820 CASE_CONVERT:
20821 case VIEW_CONVERT_EXPR:
20822 bound = TREE_OPERAND (bound, 0);
20823 break;
20824
20825 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20826 are even omitted when they are the default. */
20827 case INTEGER_CST:
20828 /* If the value for this bound is the default one, we can even omit the
20829 attribute. */
20830 if (bound_attr == DW_AT_lower_bound
20831 && tree_fits_shwi_p (bound)
20832 && (dflt = lower_bound_default ()) != -1
20833 && tree_to_shwi (bound) == dflt)
20834 return;
20835
20836 /* FALLTHRU */
20837
20838 default:
20839 /* Because of the complex interactions there can be with other GNAT
20840 encodings, GDB isn't ready yet to handle a proper DWARF description
20841 for self-referential subrange bounds: let GNAT encodings do the
20842 magic in such a case. */
20843 if (is_ada ()
20844 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20845 && contains_placeholder_p (bound))
20846 return;
20847
20848 add_scalar_info (subrange_die, bound_attr, bound,
20849 dw_scalar_form_constant
20850 | dw_scalar_form_exprloc
20851 | dw_scalar_form_reference,
20852 context);
20853 return;
20854 }
20855 }
20856
20857 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20858 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20859 Note that the block of subscript information for an array type also
20860 includes information about the element type of the given array type.
20861
20862 This function reuses previously set type and bound information if
20863 available. */
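/* As a rough example of the collapsed form: for the C declaration
   `int a[2][3]' this emits a single DW_TAG_array_type containing two
   DW_TAG_subrange_type children whose DW_AT_upper_bound values are 1
   and 2, the default C lower bound of 0 being omitted by
   add_bound_info.  */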
20864
20865 static void
20866 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20867 {
20868 unsigned dimension_number;
20869 tree lower, upper;
20870 dw_die_ref child = type_die->die_child;
20871
20872 for (dimension_number = 0;
20873 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20874 type = TREE_TYPE (type), dimension_number++)
20875 {
20876 tree domain = TYPE_DOMAIN (type);
20877
20878 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20879 break;
20880
20881 /* Arrays come in three flavors: unspecified bounds, fixed bounds,
20882 and (in GNU C only) variable bounds. Handle all three forms
20883 here. */
20884
20885 /* Find and reuse a previously generated DW_TAG_subrange_type if
20886 available.
20887
20888 For multi-dimensional arrays, as we iterate through the
20889 various dimensions in the enclosing for loop above, we also
20890 iterate through the DIE children and pick up each
20891 DW_TAG_subrange_type previously generated (if available).
20892 Each child DW_TAG_subrange_type DIE describes the range of
20893 the current dimension. At this point we should have as many
20894 DW_TAG_subrange_type's as we have dimensions in the
20895 array. */
20896 dw_die_ref subrange_die = NULL;
20897 if (child)
20898 while (1)
20899 {
20900 child = child->die_sib;
20901 if (child->die_tag == DW_TAG_subrange_type)
20902 subrange_die = child;
20903 if (child == type_die->die_child)
20904 {
20905 /* If we wrapped around, stop looking next time. */
20906 child = NULL;
20907 break;
20908 }
20909 if (child->die_tag == DW_TAG_subrange_type)
20910 break;
20911 }
20912 if (!subrange_die)
20913 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20914
20915 if (domain)
20916 {
20917 /* We have an array type with specified bounds. */
20918 lower = TYPE_MIN_VALUE (domain);
20919 upper = TYPE_MAX_VALUE (domain);
20920
20921 /* Define the index type. */
20922 if (TREE_TYPE (domain)
20923 && !get_AT (subrange_die, DW_AT_type))
20924 {
20925 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20926 TREE_TYPE field. We can't emit debug info for this
20927 because it is an unnamed integral type. */
20928 if (TREE_CODE (domain) == INTEGER_TYPE
20929 && TYPE_NAME (domain) == NULL_TREE
20930 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20931 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20932 ;
20933 else
20934 add_type_attribute (subrange_die, TREE_TYPE (domain),
20935 TYPE_UNQUALIFIED, false, type_die);
20936 }
20937
20938 /* ??? If upper is NULL, the array has unspecified length,
20939 but it does have a lower bound. This happens with Fortran
20940 dimension arr(N:*)
20941 Since the debugger is definitely going to need to know N
20942 to produce useful results, go ahead and output the lower
20943 bound solo, and hope the debugger can cope. */
20944
20945 if (!get_AT (subrange_die, DW_AT_lower_bound))
20946 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20947 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20948 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20949 }
20950
20951 /* Otherwise we have an array type with an unspecified length. The
20952 DWARF-2 spec does not say how to handle this; let's just leave out the
20953 bounds. */
20954 }
20955 }
20956
20957 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20958
20959 static void
20960 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20961 {
20962 dw_die_ref decl_die;
20963 HOST_WIDE_INT size;
20964 dw_loc_descr_ref size_expr = NULL;
20965
20966 switch (TREE_CODE (tree_node))
20967 {
20968 case ERROR_MARK:
20969 size = 0;
20970 break;
20971 case ENUMERAL_TYPE:
20972 case RECORD_TYPE:
20973 case UNION_TYPE:
20974 case QUAL_UNION_TYPE:
20975 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20976 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20977 {
20978 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20979 return;
20980 }
20981 size_expr = type_byte_size (tree_node, &size);
20982 break;
20983 case FIELD_DECL:
20984 /* For a data member of a struct or union, the DW_AT_byte_size is
20985 generally given as the number of bytes normally allocated for an
20986 object of the *declared* type of the member itself. This is true
20987 even for bit-fields. */
20988 size = int_size_in_bytes (field_type (tree_node));
20989 break;
20990 default:
20991 gcc_unreachable ();
20992 }
20993
20994 /* Support for dynamically-sized objects was introduced by DWARFv3.
20995 At the moment, GDB does not handle variable byte sizes very well,
20996 though. */
20997 if ((dwarf_version >= 3 || !dwarf_strict)
20998 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20999 && size_expr != NULL)
21000 add_AT_loc (die, DW_AT_byte_size, size_expr);
21001
21002 /* Note that `size' might be -1 when we get to this point. If it is, that
21003 indicates that the byte size of the entity in question is variable and
21004 that we could not generate a DWARF expression that computes it. */
21005 if (size >= 0)
21006 add_AT_unsigned (die, DW_AT_byte_size, size);
21007 }
21008
21009 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21010 alignment. */
21011
21012 static void
21013 add_alignment_attribute (dw_die_ref die, tree tree_node)
21014 {
21015 if (dwarf_version < 5 && dwarf_strict)
21016 return;
21017
21018 unsigned align;
21019
21020 if (DECL_P (tree_node))
21021 {
21022 if (!DECL_USER_ALIGN (tree_node))
21023 return;
21024
21025 align = DECL_ALIGN_UNIT (tree_node);
21026 }
21027 else if (TYPE_P (tree_node))
21028 {
21029 if (!TYPE_USER_ALIGN (tree_node))
21030 return;
21031
21032 align = TYPE_ALIGN_UNIT (tree_node);
21033 }
21034 else
21035 gcc_unreachable ();
21036
21037 add_AT_unsigned (die, DW_AT_alignment, align);
21038 }
21039
21040 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21041 which specifies the distance in bits from the highest order bit of the
21042 "containing object" for the bit-field to the highest order bit of the
21043 bit-field itself.
21044
21045 For any given bit-field, the "containing object" is a hypothetical object
21046 (of some integral or enum type) within which the given bit-field lives. The
21047 type of this hypothetical "containing object" is always the same as the
21048 declared type of the individual bit-field itself. The determination of the
21049 exact location of the "containing object" for a bit-field is rather
21050 complicated. It's handled by the `field_byte_offset' function (above).
21051
21052 CTX is required: see the comment for VLR_CONTEXT.
21053
21054 Note that it is the size (in bytes) of the hypothetical "containing object"
21055 which will be given in the DW_AT_byte_size attribute for this bit-field.
21056 (See `add_byte_size_attribute' above). */
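/* Rough worked example, assuming a little-endian target with 32-bit int
   and assuming field_byte_offset places the containing object at byte
   offset 0: for `struct s { unsigned a : 3; unsigned b : 5; }', field
   `b' starts at bit 3, so its highest-order bit is bit 7 while the
   containing object's highest-order bit is bit 31, giving
   DW_AT_bit_offset 24 (alongside DW_AT_bit_size 5 and
   DW_AT_byte_size 4).  */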
21057
21058 static inline void
21059 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21060 {
21061 HOST_WIDE_INT object_offset_in_bytes;
21062 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21063 HOST_WIDE_INT bitpos_int;
21064 HOST_WIDE_INT highest_order_object_bit_offset;
21065 HOST_WIDE_INT highest_order_field_bit_offset;
21066 HOST_WIDE_INT bit_offset;
21067
21068 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21069
21070 /* Must be a field and a bit field. */
21071 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21072
21073 /* We can't yet handle bit-fields whose offsets are variable, so if we
21074 encounter such things, just return without generating any attribute
21075 whatsoever. Likewise for variable or too large size. */
21076 if (! tree_fits_shwi_p (bit_position (decl))
21077 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21078 return;
21079
21080 bitpos_int = int_bit_position (decl);
21081
21082 /* Note that the bit offset is always the distance (in bits) from the
21083 highest-order bit of the "containing object" to the highest-order bit of
21084 the bit-field itself. Since the "high-order end" of any object or field
21085 is different on big-endian and little-endian machines, the computation
21086 below must take account of these differences. */
21087 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21088 highest_order_field_bit_offset = bitpos_int;
21089
21090 if (! BYTES_BIG_ENDIAN)
21091 {
21092 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21093 highest_order_object_bit_offset +=
21094 simple_type_size_in_bits (original_type);
21095 }
21096
21097 bit_offset
21098 = (! BYTES_BIG_ENDIAN
21099 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21100 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21101
21102 if (bit_offset < 0)
21103 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21104 else
21105 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21106 }
21107
21108 /* For a FIELD_DECL node which represents a bit field, output an attribute
21109 which specifies the length in bits of the given field. */
21110
21111 static inline void
21112 add_bit_size_attribute (dw_die_ref die, tree decl)
21113 {
21114 /* Must be a field and a bit field. */
21115 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21116 && DECL_BIT_FIELD_TYPE (decl));
21117
21118 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21119 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21120 }
21121
21122 /* If the compiled language is ANSI C, then add a 'prototyped'
21123 attribute when argument types are given for the parameters of a function. */
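/* For instance, in C the definition `int f (void)' has a prototype and
   gets DW_AT_prototyped, whereas the old-style declaration `int f ()'
   does not; prototype_p distinguishes the two.  */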
21124
21125 static inline void
21126 add_prototyped_attribute (dw_die_ref die, tree func_type)
21127 {
21128 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21129 {
21130 case DW_LANG_C:
21131 case DW_LANG_C89:
21132 case DW_LANG_C99:
21133 case DW_LANG_C11:
21134 case DW_LANG_ObjC:
21135 if (prototype_p (func_type))
21136 add_AT_flag (die, DW_AT_prototyped, 1);
21137 break;
21138 default:
21139 break;
21140 }
21141 }
21142
21143 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21144 by looking in the type declaration, the object declaration equate table or
21145 the block mapping. */
21146
21147 static inline dw_die_ref
21148 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21149 {
21150 dw_die_ref origin_die = NULL;
21151
21152 if (DECL_P (origin))
21153 {
21154 dw_die_ref c;
21155 origin_die = lookup_decl_die (origin);
21156 /* "Unwrap" the decls DIE which we put in the imported unit context.
21157 We are looking for the abstract copy here. */
21158 if (in_lto_p
21159 && origin_die
21160 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21161 /* ??? Identify this better. */
21162 && c->with_offset)
21163 origin_die = c;
21164 }
21165 else if (TYPE_P (origin))
21166 origin_die = lookup_type_die (origin);
21167 else if (TREE_CODE (origin) == BLOCK)
21168 origin_die = BLOCK_DIE (origin);
21169
21170 /* XXX: Functions that are never lowered don't always have correct block
21171 trees (in the case of Java and some other languages, they simply have
21172 no block tree). For these functions, there is nothing we can really do to
21173 output correct debug info for inlined functions in all cases. Rather
21174 than die, we'll just produce deficient debug info now, in that we will
21175 have variables without a proper abstract origin. In the future, when all
21176 functions are lowered, we should re-add a gcc_assert (origin_die)
21177 here. */
21178
21179 if (origin_die)
21180 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21181 return origin_die;
21182 }
21183
21184 /* We do not currently support the pure_virtual attribute. */
21185
21186 static inline void
21187 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21188 {
21189 if (DECL_VINDEX (func_decl))
21190 {
21191 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21192
21193 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21194 add_AT_loc (die, DW_AT_vtable_elem_location,
21195 new_loc_descr (DW_OP_constu,
21196 tree_to_shwi (DECL_VINDEX (func_decl)),
21197 0));
21198
21199 /* GNU extension: Record what type this method came from originally. */
21200 if (debug_info_level > DINFO_LEVEL_TERSE
21201 && DECL_CONTEXT (func_decl))
21202 add_AT_die_ref (die, DW_AT_containing_type,
21203 lookup_type_die (DECL_CONTEXT (func_decl)));
21204 }
21205 }
21206 \f
21207 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21208 given decl. This was a vendor extension until DWARF 4
21209 standardized it. */
21210
21211 static void
21212 add_linkage_attr (dw_die_ref die, tree decl)
21213 {
21214 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21215
21216 /* Mimic what assemble_name_raw does with a leading '*'. */
21217 if (name[0] == '*')
21218 name = &name[1];
21219
21220 if (dwarf_version >= 4)
21221 add_AT_string (die, DW_AT_linkage_name, name);
21222 else
21223 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21224 }
21225
21226 /* Add source coordinate attributes for the given decl. */
21227
21228 static void
21229 add_src_coords_attributes (dw_die_ref die, tree decl)
21230 {
21231 expanded_location s;
21232
21233 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21234 return;
21235 s = expand_location (DECL_SOURCE_LOCATION (decl));
21236 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21237 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21238 if (debug_column_info && s.column)
21239 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21240 }
21241
21242 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21243
21244 static void
21245 add_linkage_name_raw (dw_die_ref die, tree decl)
21246 {
21247 /* Defer until we have an assembler name set. */
21248 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21249 {
21250 limbo_die_node *asm_name;
21251
21252 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21253 asm_name->die = die;
21254 asm_name->created_for = decl;
21255 asm_name->next = deferred_asm_name;
21256 deferred_asm_name = asm_name;
21257 }
21258 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21259 add_linkage_attr (die, decl);
21260 }
21261
21262 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21263
21264 static void
21265 add_linkage_name (dw_die_ref die, tree decl)
21266 {
21267 if (debug_info_level > DINFO_LEVEL_NONE
21268 && VAR_OR_FUNCTION_DECL_P (decl)
21269 && TREE_PUBLIC (decl)
21270 && !(VAR_P (decl) && DECL_REGISTER (decl))
21271 && die->die_tag != DW_TAG_member)
21272 add_linkage_name_raw (die, decl);
21273 }
21274
21275 /* Add a DW_AT_name attribute and source coordinate attribute for the
21276 given decl, but only if it actually has a name. */
21277
21278 static void
21279 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21280 bool no_linkage_name)
21281 {
21282 tree decl_name;
21283
21284 decl_name = DECL_NAME (decl);
21285 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21286 {
21287 const char *name = dwarf2_name (decl, 0);
21288 if (name)
21289 add_name_attribute (die, name);
21290 if (! DECL_ARTIFICIAL (decl))
21291 add_src_coords_attributes (die, decl);
21292
21293 if (!no_linkage_name)
21294 add_linkage_name (die, decl);
21295 }
21296
21297 #ifdef VMS_DEBUGGING_INFO
21298 /* Get the function's name, as described by its RTL. This may be different
21299 from the DECL_NAME name used in the source file. */
21300 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21301 {
21302 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21303 XEXP (DECL_RTL (decl), 0), false);
21304 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21305 }
21306 #endif /* VMS_DEBUGGING_INFO */
21307 }
21308
21309 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21310
21311 static void
21312 add_discr_value (dw_die_ref die, dw_discr_value *value)
21313 {
21314 dw_attr_node attr;
21315
21316 attr.dw_attr = DW_AT_discr_value;
21317 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21318 attr.dw_attr_val.val_entry = NULL;
21319 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21320 if (value->pos)
21321 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21322 else
21323 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21324 add_dwarf_attr (die, &attr);
21325 }
21326
21327 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21328
21329 static void
21330 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21331 {
21332 dw_attr_node attr;
21333
21334 attr.dw_attr = DW_AT_discr_list;
21335 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21336 attr.dw_attr_val.val_entry = NULL;
21337 attr.dw_attr_val.v.val_discr_list = discr_list;
21338 add_dwarf_attr (die, &attr);
21339 }
21340
21341 static inline dw_discr_list_ref
21342 AT_discr_list (dw_attr_node *attr)
21343 {
21344 return attr->dw_attr_val.v.val_discr_list;
21345 }
21346
21347 #ifdef VMS_DEBUGGING_INFO
21348 /* Output the debug main pointer DIE for VMS. */
21349
21350 void
21351 dwarf2out_vms_debug_main_pointer (void)
21352 {
21353 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21354 dw_die_ref die;
21355
21356 /* Allocate the VMS debug main subprogram die. */
21357 die = new_die_raw (DW_TAG_subprogram);
21358 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21359 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21360 current_function_funcdef_no);
21361 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21362
21363 /* Make it the first child of comp_unit_die (). */
21364 die->die_parent = comp_unit_die ();
21365 if (comp_unit_die ()->die_child)
21366 {
21367 die->die_sib = comp_unit_die ()->die_child->die_sib;
21368 comp_unit_die ()->die_child->die_sib = die;
21369 }
21370 else
21371 {
21372 die->die_sib = die;
21373 comp_unit_die ()->die_child = die;
21374 }
21375 }
21376 #endif /* VMS_DEBUGGING_INFO */
21377
21378 /* walk_tree helper function for uses_local_type, below. */
21379
21380 static tree
21381 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21382 {
21383 if (!TYPE_P (*tp))
21384 *walk_subtrees = 0;
21385 else
21386 {
21387 tree name = TYPE_NAME (*tp);
21388 if (name && DECL_P (name) && decl_function_context (name))
21389 return *tp;
21390 }
21391 return NULL_TREE;
21392 }
21393
21394 /* If TYPE involves a function-local type (including a local typedef to a
21395 non-local type), returns that type; otherwise returns NULL_TREE. */
21396
21397 static tree
21398 uses_local_type (tree type)
21399 {
21400 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21401 return used;
21402 }
21403
21404 /* Return the DIE for the scope that immediately contains this type.
21405 Non-named types that do not involve a function-local type get global
21406 scope. Named types nested in namespaces or other types get their
21407 containing scope. All other types (i.e. function-local named types) get
21408 the current active scope. */
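/* As an illustration, given `namespace N { struct S { struct T { }; }; }'
   in C++, the scope DIE returned for T is the DIE of S and the one for S
   is the DIE of N (assuming debug_info_level above DINFO_LEVEL_TERSE),
   whereas a type at file scope that does not use a function-local type
   gets comp_unit_die ().  */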
21409
21410 static dw_die_ref
21411 scope_die_for (tree t, dw_die_ref context_die)
21412 {
21413 dw_die_ref scope_die = NULL;
21414 tree containing_scope;
21415
21416 /* Non-types always go in the current scope. */
21417 gcc_assert (TYPE_P (t));
21418
21419 /* Use the scope of the typedef, rather than the scope of the type
21420 it refers to. */
21421 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21422 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21423 else
21424 containing_scope = TYPE_CONTEXT (t);
21425
21426 /* Use the containing namespace if there is one. */
21427 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21428 {
21429 if (context_die == lookup_decl_die (containing_scope))
21430 /* OK */;
21431 else if (debug_info_level > DINFO_LEVEL_TERSE)
21432 context_die = get_context_die (containing_scope);
21433 else
21434 containing_scope = NULL_TREE;
21435 }
21436
21437 /* Ignore function type "scopes" from the C frontend. They mean that
21438 a tagged type is local to a parmlist of a function declarator, but
21439 that isn't useful to DWARF. */
21440 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21441 containing_scope = NULL_TREE;
21442
21443 if (SCOPE_FILE_SCOPE_P (containing_scope))
21444 {
21445 /* If T uses a local type keep it local as well, to avoid references
21446 to function-local DIEs from outside the function. */
21447 if (current_function_decl && uses_local_type (t))
21448 scope_die = context_die;
21449 else
21450 scope_die = comp_unit_die ();
21451 }
21452 else if (TYPE_P (containing_scope))
21453 {
21454 /* For types, we can just look up the appropriate DIE. */
21455 if (debug_info_level > DINFO_LEVEL_TERSE)
21456 scope_die = get_context_die (containing_scope);
21457 else
21458 {
21459 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21460 if (scope_die == NULL)
21461 scope_die = comp_unit_die ();
21462 }
21463 }
21464 else
21465 scope_die = context_die;
21466
21467 return scope_die;
21468 }
21469
21470 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21471
21472 static inline int
21473 local_scope_p (dw_die_ref context_die)
21474 {
21475 for (; context_die; context_die = context_die->die_parent)
21476 if (context_die->die_tag == DW_TAG_inlined_subroutine
21477 || context_die->die_tag == DW_TAG_subprogram)
21478 return 1;
21479
21480 return 0;
21481 }
21482
21483 /* Returns nonzero if CONTEXT_DIE is a class. */
21484
21485 static inline int
21486 class_scope_p (dw_die_ref context_die)
21487 {
21488 return (context_die
21489 && (context_die->die_tag == DW_TAG_structure_type
21490 || context_die->die_tag == DW_TAG_class_type
21491 || context_die->die_tag == DW_TAG_interface_type
21492 || context_die->die_tag == DW_TAG_union_type));
21493 }
21494
21495 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21496 whether or not to treat a DIE in this context as a declaration. */
21497
21498 static inline int
21499 class_or_namespace_scope_p (dw_die_ref context_die)
21500 {
21501 return (class_scope_p (context_die)
21502 || (context_die && context_die->die_tag == DW_TAG_namespace));
21503 }
21504
21505 /* Many forms of DIEs require a "type description" attribute. This
21506 routine locates the proper "type descriptor" die for the type given
21507 by 'type' plus any additional qualifiers given by 'cv_quals', and
21508 adds a DW_AT_type attribute below the given die. */
21509
21510 static void
21511 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21512 bool reverse, dw_die_ref context_die)
21513 {
21514 enum tree_code code = TREE_CODE (type);
21515 dw_die_ref type_die = NULL;
21516
21517 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21518 or fixed-point type, use the inner type. This is because we have no
21519 support for unnamed types in base_type_die. This can happen if this is
21520 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21521 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21522 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21523 type = TREE_TYPE (type), code = TREE_CODE (type);
21524
21525 if (code == ERROR_MARK
21526 /* Handle a special case. For functions whose return type is void, we
21527 generate *no* type attribute. (Note that no object may have type
21528 `void', so this only applies to function return types). */
21529 || code == VOID_TYPE)
21530 return;
21531
21532 type_die = modified_type_die (type,
21533 cv_quals | TYPE_QUALS (type),
21534 reverse,
21535 context_die);
21536
21537 if (type_die != NULL)
21538 add_AT_die_ref (object_die, DW_AT_type, type_die);
21539 }
21540
21541 /* Given an object die, add the calling convention attribute for the
21542 function call type. */
21543 static void
21544 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21545 {
21546 enum dwarf_calling_convention value = DW_CC_normal;
21547
21548 value = ((enum dwarf_calling_convention)
21549 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21550
21551 if (is_fortran ()
21552 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21553 {
21554 /* DWARF 2 doesn't provide a way to identify a program's source-level
21555 entry point. DW_AT_calling_convention attributes are only meant
21556 to describe functions' calling conventions. However, lacking a
21557 better way to signal the Fortran main program, we used this for
21558 a long time, following existing custom. Now, DWARF 4 has
21559 DW_AT_main_subprogram, which we add below, but some tools still
21560 rely on the old way, which we thus keep. */
21561 value = DW_CC_program;
21562
21563 if (dwarf_version >= 4 || !dwarf_strict)
21564 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21565 }
21566
21567 /* Only add the attribute if the backend requests it and the value
21568 is not DW_CC_normal. */
21569 if (value && (value != DW_CC_normal))
21570 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21571 }
21572
21573 /* Given a tree pointer to a struct, class, union, or enum type node, return
21574 a pointer to the (string) tag name for the given type, or zero if the type
21575 was declared without a tag. */
21576
21577 static const char *
21578 type_tag (const_tree type)
21579 {
21580 const char *name = 0;
21581
21582 if (TYPE_NAME (type) != 0)
21583 {
21584 tree t = 0;
21585
21586 /* Find the IDENTIFIER_NODE for the type name. */
21587 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21588 && !TYPE_NAMELESS (type))
21589 t = TYPE_NAME (type);
21590
21591 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21592 a TYPE_DECL node, regardless of whether or not a `typedef' was
21593 involved. */
21594 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21595 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21596 {
21597 /* We want to be extra verbose. Don't call dwarf_name if
21598 DECL_NAME isn't set. The default hook for decl_printable_name
21599 doesn't like that, and in this context it's correct to return
21600 0, instead of "<anonymous>" or the like. */
21601 if (DECL_NAME (TYPE_NAME (type))
21602 && !DECL_NAMELESS (TYPE_NAME (type)))
21603 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21604 }
21605
21606 /* Now get the name as a string, or invent one. */
21607 if (!name && t != 0)
21608 name = IDENTIFIER_POINTER (t);
21609 }
21610
21611 return (name == 0 || *name == '\0') ? 0 : name;
21612 }
21613
21614 /* Return the type associated with a data member, making a special check
21615 for bit-field types. */
21616
21617 static inline tree
21618 member_declared_type (const_tree member)
21619 {
21620 return (DECL_BIT_FIELD_TYPE (member)
21621 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21622 }
21623
21624 /* Get the decl's label, as described by its RTL. This may be different
21625 from the DECL_NAME name used in the source file. */
21626
21627 #if 0
21628 static const char *
21629 decl_start_label (tree decl)
21630 {
21631 rtx x;
21632 const char *fnname;
21633
21634 x = DECL_RTL (decl);
21635 gcc_assert (MEM_P (x));
21636
21637 x = XEXP (x, 0);
21638 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21639
21640 fnname = XSTR (x, 0);
21641 return fnname;
21642 }
21643 #endif
21644 \f
21645 /* For variable-length arrays that have been previously generated, but
21646 may be incomplete due to missing subscript info, fill the subscript
21647 info. Return TRUE if this is one of those cases. */
21648 static bool
21649 fill_variable_array_bounds (tree type)
21650 {
21651 if (TREE_ASM_WRITTEN (type)
21652 && TREE_CODE (type) == ARRAY_TYPE
21653 && variably_modified_type_p (type, NULL))
21654 {
21655 dw_die_ref array_die = lookup_type_die (type);
21656 if (!array_die)
21657 return false;
21658 add_subscript_info (array_die, type, !is_ada ());
21659 return true;
21660 }
21661 return false;
21662 }
21663
21664 /* These routines generate the internal representation of the DIE's for
21665 the compilation unit. Debugging information is collected by walking
21666 the declaration trees passed in from dwarf2out_decl(). */
21667
21668 static void
21669 gen_array_type_die (tree type, dw_die_ref context_die)
21670 {
21671 dw_die_ref array_die;
21672
21673 /* GNU compilers represent multidimensional array types as sequences of one
21674 dimensional array types whose element types are themselves array types.
21675 We sometimes squish that down to a single array_type DIE with multiple
21676 subscripts in the Dwarf debugging info. The draft Dwarf specification
21677 says that we are allowed to do this kind of compression in C, because
21678 there is no difference between an array of arrays and a multidimensional
21679 array. We don't do this for Ada, to remain as close as possible to the
21680 actual representation, which is especially important given the language's
21681 flexibility with regard to arrays of variable size. */
21682
21683 bool collapse_nested_arrays = !is_ada ();
21684
21685 if (fill_variable_array_bounds (type))
21686 return;
21687
21688 dw_die_ref scope_die = scope_die_for (type, context_die);
21689 tree element_type;
21690
21691 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21692 DW_TAG_string_type doesn't have a DW_AT_type attribute). */
21693 if (TYPE_STRING_FLAG (type)
21694 && TREE_CODE (type) == ARRAY_TYPE
21695 && is_fortran ()
21696 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21697 {
21698 HOST_WIDE_INT size;
21699
21700 array_die = new_die (DW_TAG_string_type, scope_die, type);
21701 add_name_attribute (array_die, type_tag (type));
21702 equate_type_number_to_die (type, array_die);
21703 size = int_size_in_bytes (type);
21704 if (size >= 0)
21705 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21706 /* ??? We can't annotate types late, but for LTO we may not
21707 generate a location early either (gfortran.dg/save_6.f90). */
21708 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21709 && TYPE_DOMAIN (type) != NULL_TREE
21710 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21711 {
21712 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21713 tree rszdecl = szdecl;
21714
21715 size = int_size_in_bytes (TREE_TYPE (szdecl));
21716 if (!DECL_P (szdecl))
21717 {
21718 if (TREE_CODE (szdecl) == INDIRECT_REF
21719 && DECL_P (TREE_OPERAND (szdecl, 0)))
21720 {
21721 rszdecl = TREE_OPERAND (szdecl, 0);
21722 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21723 != DWARF2_ADDR_SIZE)
21724 size = 0;
21725 }
21726 else
21727 size = 0;
21728 }
21729 if (size > 0)
21730 {
21731 dw_loc_list_ref loc
21732 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21733 NULL);
21734 if (loc)
21735 {
21736 add_AT_location_description (array_die, DW_AT_string_length,
21737 loc);
21738 if (size != DWARF2_ADDR_SIZE)
21739 add_AT_unsigned (array_die, dwarf_version >= 5
21740 ? DW_AT_string_length_byte_size
21741 : DW_AT_byte_size, size);
21742 }
21743 }
21744 }
21745 return;
21746 }
21747
21748 array_die = new_die (DW_TAG_array_type, scope_die, type);
21749 add_name_attribute (array_die, type_tag (type));
21750 equate_type_number_to_die (type, array_die);
21751
21752 if (TREE_CODE (type) == VECTOR_TYPE)
21753 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21754
21755 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21756 if (is_fortran ()
21757 && TREE_CODE (type) == ARRAY_TYPE
21758 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21759 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21760 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21761
21762 #if 0
21763 /* We default the array ordering. Debuggers will probably do the right
21764 things even if DW_AT_ordering is not present. It's not even an issue
21765 until we start to get into multidimensional arrays anyway. If a debugger
21766 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21767 then we'll have to put the DW_AT_ordering attribute back in. (But if
21768 and when we find out that we need to put these in, we will only do so
21769 for multidimensional arrays.) */
21770 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21771 #endif
21772
21773 if (TREE_CODE (type) == VECTOR_TYPE)
21774 {
21775 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21776 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21777 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21778 add_bound_info (subrange_die, DW_AT_upper_bound,
21779 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21780 }
21781 else
21782 add_subscript_info (array_die, type, collapse_nested_arrays);
21783
21784 /* Add representation of the type of the elements of this array type and
21785 emit the corresponding DIE if we haven't done it already. */
21786 element_type = TREE_TYPE (type);
21787 if (collapse_nested_arrays)
21788 while (TREE_CODE (element_type) == ARRAY_TYPE)
21789 {
21790 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21791 break;
21792 element_type = TREE_TYPE (element_type);
21793 }
21794
21795 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21796 TREE_CODE (type) == ARRAY_TYPE
21797 && TYPE_REVERSE_STORAGE_ORDER (type),
21798 context_die);
21799
21800 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21801 if (TYPE_ARTIFICIAL (type))
21802 add_AT_flag (array_die, DW_AT_artificial, 1);
21803
21804 if (get_AT (array_die, DW_AT_name))
21805 add_pubtype (type, array_die);
21806
21807 add_alignment_attribute (array_die, type);
21808 }
21809
21810 /* This routine generates a DIE for an array with a hidden descriptor;
21811 details are filled into *info by a langhook. */
21812
21813 static void
21814 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21815 dw_die_ref context_die)
21816 {
21817 const dw_die_ref scope_die = scope_die_for (type, context_die);
21818 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21819 struct loc_descr_context context = { type, info->base_decl, NULL,
21820 false, false };
21821 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21822 int dim;
21823
21824 add_name_attribute (array_die, type_tag (type));
21825 equate_type_number_to_die (type, array_die);
21826
21827 if (info->ndimensions > 1)
21828 switch (info->ordering)
21829 {
21830 case array_descr_ordering_row_major:
21831 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21832 break;
21833 case array_descr_ordering_column_major:
21834 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21835 break;
21836 default:
21837 break;
21838 }
21839
21840 if (dwarf_version >= 3 || !dwarf_strict)
21841 {
21842 if (info->data_location)
21843 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21844 dw_scalar_form_exprloc, &context);
21845 if (info->associated)
21846 add_scalar_info (array_die, DW_AT_associated, info->associated,
21847 dw_scalar_form_constant
21848 | dw_scalar_form_exprloc
21849 | dw_scalar_form_reference, &context);
21850 if (info->allocated)
21851 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21852 dw_scalar_form_constant
21853 | dw_scalar_form_exprloc
21854 | dw_scalar_form_reference, &context);
21855 if (info->stride)
21856 {
21857 const enum dwarf_attribute attr
21858 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21859 const int forms
21860 = (info->stride_in_bits)
21861 ? dw_scalar_form_constant
21862 : (dw_scalar_form_constant
21863 | dw_scalar_form_exprloc
21864 | dw_scalar_form_reference);
21865
21866 add_scalar_info (array_die, attr, info->stride, forms, &context);
21867 }
21868 }
21869 if (dwarf_version >= 5)
21870 {
21871 if (info->rank)
21872 {
21873 add_scalar_info (array_die, DW_AT_rank, info->rank,
21874 dw_scalar_form_constant
21875 | dw_scalar_form_exprloc, &context);
21876 subrange_tag = DW_TAG_generic_subrange;
21877 context.placeholder_arg = true;
21878 }
21879 }
21880
21881 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21882
21883 for (dim = 0; dim < info->ndimensions; dim++)
21884 {
21885 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21886
21887 if (info->dimen[dim].bounds_type)
21888 add_type_attribute (subrange_die,
21889 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21890 false, context_die);
21891 if (info->dimen[dim].lower_bound)
21892 add_bound_info (subrange_die, DW_AT_lower_bound,
21893 info->dimen[dim].lower_bound, &context);
21894 if (info->dimen[dim].upper_bound)
21895 add_bound_info (subrange_die, DW_AT_upper_bound,
21896 info->dimen[dim].upper_bound, &context);
21897 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21898 add_scalar_info (subrange_die, DW_AT_byte_stride,
21899 info->dimen[dim].stride,
21900 dw_scalar_form_constant
21901 | dw_scalar_form_exprloc
21902 | dw_scalar_form_reference,
21903 &context);
21904 }
21905
21906 gen_type_die (info->element_type, context_die);
21907 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21908 TREE_CODE (type) == ARRAY_TYPE
21909 && TYPE_REVERSE_STORAGE_ORDER (type),
21910 context_die);
21911
21912 if (get_AT (array_die, DW_AT_name))
21913 add_pubtype (type, array_die);
21914
21915 add_alignment_attribute (array_die, type);
21916 }
21917
21918 #if 0
21919 static void
21920 gen_entry_point_die (tree decl, dw_die_ref context_die)
21921 {
21922 tree origin = decl_ultimate_origin (decl);
21923 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21924
21925 if (origin != NULL)
21926 add_abstract_origin_attribute (decl_die, origin);
21927 else
21928 {
21929 add_name_and_src_coords_attributes (decl_die, decl);
21930 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21931 TYPE_UNQUALIFIED, false, context_die);
21932 }
21933
21934 if (DECL_ABSTRACT_P (decl))
21935 equate_decl_number_to_die (decl, decl_die);
21936 else
21937 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21938 }
21939 #endif
21940
21941 /* Walk through the list of incomplete types again, trying once more to
21942 emit full debugging info for them. */
21943
21944 static void
21945 retry_incomplete_types (void)
21946 {
21947 set_early_dwarf s;
21948 int i;
21949
21950 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21951 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21952 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21953 vec_safe_truncate (incomplete_types, 0);
21954 }
21955
21956 /* Determine what tag to use for a record type. */
21957
21958 static enum dwarf_tag
21959 record_type_tag (tree type)
21960 {
21961 if (! lang_hooks.types.classify_record)
21962 return DW_TAG_structure_type;
21963
21964 switch (lang_hooks.types.classify_record (type))
21965 {
21966 case RECORD_IS_STRUCT:
21967 return DW_TAG_structure_type;
21968
21969 case RECORD_IS_CLASS:
21970 return DW_TAG_class_type;
21971
21972 case RECORD_IS_INTERFACE:
21973 if (dwarf_version >= 3 || !dwarf_strict)
21974 return DW_TAG_interface_type;
21975 return DW_TAG_structure_type;
21976
21977 default:
21978 gcc_unreachable ();
21979 }
21980 }
21981
21982 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21983 include all of the information about the enumeration values also. Each
21984 enumerated type name/value is listed as a child of the enumerated type
21985 DIE. */
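/* For a rough example, the C declaration `enum e { A = 1, B = 2 };'
   yields a DW_TAG_enumeration_type DIE named "e" with two
   DW_TAG_enumerator children whose DW_AT_const_value attributes are
   1 and 2 respectively.  */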
21986
21987 static dw_die_ref
21988 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21989 {
21990 dw_die_ref type_die = lookup_type_die (type);
21991 dw_die_ref orig_type_die = type_die;
21992
21993 if (type_die == NULL)
21994 {
21995 type_die = new_die (DW_TAG_enumeration_type,
21996 scope_die_for (type, context_die), type);
21997 equate_type_number_to_die (type, type_die);
21998 add_name_attribute (type_die, type_tag (type));
21999 if ((dwarf_version >= 4 || !dwarf_strict)
22000 && ENUM_IS_SCOPED (type))
22001 add_AT_flag (type_die, DW_AT_enum_class, 1);
22002 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22003 add_AT_flag (type_die, DW_AT_declaration, 1);
22004 if (!dwarf_strict)
22005 add_AT_unsigned (type_die, DW_AT_encoding,
22006 TYPE_UNSIGNED (type)
22007 ? DW_ATE_unsigned
22008 : DW_ATE_signed);
22009 }
22010 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22011 return type_die;
22012 else
22013 remove_AT (type_die, DW_AT_declaration);
22014
22015 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22016 given enum type is incomplete, do not generate the DW_AT_byte_size
22017 attribute or the DW_AT_element_list attribute. */
22018 if (TYPE_SIZE (type))
22019 {
22020 tree link;
22021
22022 if (!ENUM_IS_OPAQUE (type))
22023 TREE_ASM_WRITTEN (type) = 1;
22024 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22025 add_byte_size_attribute (type_die, type);
22026 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22027 add_alignment_attribute (type_die, type);
22028 if ((dwarf_version >= 3 || !dwarf_strict)
22029 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22030 {
22031 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22032 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22033 context_die);
22034 }
22035 if (TYPE_STUB_DECL (type) != NULL_TREE)
22036 {
22037 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22038 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22039 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22040 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22041 }
22042
22043 /* If the first reference to this type was as the return type of an
22044 inline function, then it may not have a parent. Fix this now. */
22045 if (type_die->die_parent == NULL)
22046 add_child_die (scope_die_for (type, context_die), type_die);
22047
22048 for (link = TYPE_VALUES (type);
22049 link != NULL; link = TREE_CHAIN (link))
22050 {
22051 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22052 tree value = TREE_VALUE (link);
22053
22054 gcc_assert (!ENUM_IS_OPAQUE (type));
22055 add_name_attribute (enum_die,
22056 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22057
22058 if (TREE_CODE (value) == CONST_DECL)
22059 value = DECL_INITIAL (value);
22060
22061 if (simple_type_size_in_bits (TREE_TYPE (value))
22062 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22063 {
22064 /* For constant forms created by add_AT_unsigned DWARF
22065 consumers (GDB, elfutils, etc.) always zero extend
22066 the value. Only when the actual value is negative
22067 do we need to use add_AT_int to generate a constant
22068 form that can represent negative values. */
22069 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22070 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22071 add_AT_unsigned (enum_die, DW_AT_const_value,
22072 (unsigned HOST_WIDE_INT) val);
22073 else
22074 add_AT_int (enum_die, DW_AT_const_value, val);
22075 }
22076 else
22077 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22078 that here. TODO: This should be re-worked to use correct
22079 signed/unsigned double tags for all cases. */
22080 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22081 }
22082
22083 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22084 if (TYPE_ARTIFICIAL (type)
22085 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22086 add_AT_flag (type_die, DW_AT_artificial, 1);
22087 }
22088 else
22089 add_AT_flag (type_die, DW_AT_declaration, 1);
22090
22091 add_pubtype (type, type_die);
22092
22093 return type_die;
22094 }
22095
22096 /* Generate a DIE to represent either a real live formal parameter decl or to
22097 represent just the type of some formal parameter position in some function
22098 type.
22099
22100 Note that this routine is a bit unusual because its argument may be a
22101 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22102 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22103 node. If it's the former then this function is being called to output a
22104 DIE to represent a formal parameter object (or some inlining thereof). If
22105 it's the latter, then this function is only being called to output a
22106 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22107 argument type of some subprogram type.
22108 If EMIT_NAME_P is true, name and source coordinate attributes
22109 are emitted. */
22110
22111 static dw_die_ref
22112 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22113 dw_die_ref context_die)
22114 {
22115 tree node_or_origin = node ? node : origin;
22116 tree ultimate_origin;
22117 dw_die_ref parm_die = NULL;
22118
22119 if (DECL_P (node_or_origin))
22120 {
22121 parm_die = lookup_decl_die (node);
22122
22123 /* If the contexts differ, we may not be talking about the same
22124 thing.
22125 ??? When in LTO the DIE parent is the "abstract" copy and the
22126 context_die is the specification "copy". But this whole block
22127 should eventually no longer be needed. */
22128 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22129 {
22130 if (!DECL_ABSTRACT_P (node))
22131 {
22132 /* This can happen when creating an inlined instance, in
22133 which case we need to create a new DIE that will get
22134 annotated with DW_AT_abstract_origin. */
22135 parm_die = NULL;
22136 }
22137 else
22138 gcc_unreachable ();
22139 }
22140
22141 if (parm_die && parm_die->die_parent == NULL)
22142 {
22143 /* Check that parm_die already has the right attributes that
22144 we would have added below. If any attributes are
22145 missing, fall through to add them. */
22146 if (! DECL_ABSTRACT_P (node_or_origin)
22147 && !get_AT (parm_die, DW_AT_location)
22148 && !get_AT (parm_die, DW_AT_const_value))
22149 /* We are missing location info, and are about to add it. */
22150 ;
22151 else
22152 {
22153 add_child_die (context_die, parm_die);
22154 return parm_die;
22155 }
22156 }
22157 }
22158
22159 /* If we have a previously generated DIE, use it, unless this is a
22160 concrete instance (origin != NULL), in which case we need a new
22161 DIE with a corresponding DW_AT_abstract_origin. */
22162 bool reusing_die;
22163 if (parm_die && origin == NULL)
22164 reusing_die = true;
22165 else
22166 {
22167 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22168 reusing_die = false;
22169 }
22170
22171 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22172 {
22173 case tcc_declaration:
22174 ultimate_origin = decl_ultimate_origin (node_or_origin);
22175 if (node || ultimate_origin)
22176 origin = ultimate_origin;
22177
22178 if (reusing_die)
22179 goto add_location;
22180
22181 if (origin != NULL)
22182 add_abstract_origin_attribute (parm_die, origin);
22183 else if (emit_name_p)
22184 add_name_and_src_coords_attributes (parm_die, node);
22185 if (origin == NULL
22186 || (! DECL_ABSTRACT_P (node_or_origin)
22187 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22188 decl_function_context
22189 (node_or_origin))))
22190 {
22191 tree type = TREE_TYPE (node_or_origin);
22192 if (decl_by_reference_p (node_or_origin))
22193 add_type_attribute (parm_die, TREE_TYPE (type),
22194 TYPE_UNQUALIFIED,
22195 false, context_die);
22196 else
22197 add_type_attribute (parm_die, type,
22198 decl_quals (node_or_origin),
22199 false, context_die);
22200 }
22201 if (origin == NULL && DECL_ARTIFICIAL (node))
22202 add_AT_flag (parm_die, DW_AT_artificial, 1);
22203 add_location:
22204 if (node && node != origin)
22205 equate_decl_number_to_die (node, parm_die);
22206 if (! DECL_ABSTRACT_P (node_or_origin))
22207 add_location_or_const_value_attribute (parm_die, node_or_origin,
22208 node == NULL);
22209
22210 break;
22211
22212 case tcc_type:
22213 /* We were called with some kind of a ..._TYPE node. */
22214 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22215 context_die);
22216 break;
22217
22218 default:
22219 gcc_unreachable ();
22220 }
22221
22222 return parm_die;
22223 }
22224
22225 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22226 children DW_TAG_formal_parameter DIEs representing the arguments of the
22227 parameter pack.
22228
22229 PARM_PACK must be a function parameter pack.
22230 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22231 must point to the subsequent arguments of the function PACK_ARG belongs to.
22232 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22233 If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument
22234 following the last one for which a DIE was generated. */
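/* As an illustrative C++ sketch: for an instantiation of
   `template <typename... T> void f (T... args)', each parameter expanded
   from `args' gets its own DW_TAG_formal_parameter DIE, and all of them
   are grouped as children of one DW_TAG_GNU_formal_parameter_pack DIE
   inside the subprogram's DIE.  */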
22235
22236 static dw_die_ref
22237 gen_formal_parameter_pack_die (tree parm_pack,
22238 tree pack_arg,
22239 dw_die_ref subr_die,
22240 tree *next_arg)
22241 {
22242 tree arg;
22243 dw_die_ref parm_pack_die;
22244
22245 gcc_assert (parm_pack
22246 && lang_hooks.function_parameter_pack_p (parm_pack)
22247 && subr_die);
22248
22249 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22250 add_src_coords_attributes (parm_pack_die, parm_pack);
22251
22252 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22253 {
22254 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22255 parm_pack))
22256 break;
22257 gen_formal_parameter_die (arg, NULL,
22258 false /* Don't emit name attribute. */,
22259 parm_pack_die);
22260 }
22261 if (next_arg)
22262 *next_arg = arg;
22263 return parm_pack_die;
22264 }
22265
22266 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22267 at the end of an (ANSI prototyped) formal parameter list. */
22268
22269 static void
22270 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22271 {
22272 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22273 }
22274
22275 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22276 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22277 parameters as specified in some function type specification (except for
22278 those which appear as part of a function *definition*). */
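/* For a rough example, a function type such as `int (int, char *, ...)'
   gets two nameless DW_TAG_formal_parameter DIEs (for `int' and
   `char *') followed by a DW_TAG_unspecified_parameters DIE for the
   ellipsis; the second pass then emits DIEs for the parameter types
   themselves as needed.  */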
22279
22280 static void
22281 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22282 {
22283 tree link;
22284 tree formal_type = NULL;
22285 tree first_parm_type;
22286 tree arg;
22287
22288 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22289 {
22290 arg = DECL_ARGUMENTS (function_or_method_type);
22291 function_or_method_type = TREE_TYPE (function_or_method_type);
22292 }
22293 else
22294 arg = NULL_TREE;
22295
22296 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22297
22298 /* Make our first pass over the list of formal parameter types and output a
22299 DW_TAG_formal_parameter DIE for each one. */
22300 for (link = first_parm_type; link; )
22301 {
22302 dw_die_ref parm_die;
22303
22304 formal_type = TREE_VALUE (link);
22305 if (formal_type == void_type_node)
22306 break;
22307
22308 /* Output a (nameless) DIE to represent the formal parameter itself. */
22309 parm_die = gen_formal_parameter_die (formal_type, NULL,
22310 true /* Emit name attribute. */,
22311 context_die);
22312 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22313 && link == first_parm_type)
22314 {
22315 add_AT_flag (parm_die, DW_AT_artificial, 1);
22316 if (dwarf_version >= 3 || !dwarf_strict)
22317 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22318 }
22319 else if (arg && DECL_ARTIFICIAL (arg))
22320 add_AT_flag (parm_die, DW_AT_artificial, 1);
22321
22322 link = TREE_CHAIN (link);
22323 if (arg)
22324 arg = DECL_CHAIN (arg);
22325 }
22326
22327 /* If this function type has an ellipsis, add a
22328 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22329 if (formal_type != void_type_node)
22330 gen_unspecified_parameters_die (function_or_method_type, context_die);
22331
22332 /* Make our second (and final) pass over the list of formal parameter types
22333 and output DIEs to represent those types (as necessary). */
22334 for (link = TYPE_ARG_TYPES (function_or_method_type);
22335 link && TREE_VALUE (link);
22336 link = TREE_CHAIN (link))
22337 gen_type_die (TREE_VALUE (link), context_die);
22338 }
22339
22340 /* We want to generate the DIE for TYPE so that we can generate the
22341 DIE for MEMBER, which has been defined; we will need to refer back
22342 to the member declaration nested within TYPE. If we're trying to
22343 generate minimal debug info for TYPE, processing TYPE won't do the
22344 trick; we need to attach the member declaration by hand. */
22345
22346 static void
22347 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22348 {
22349 gen_type_die (type, context_die);
22350
22351 /* If we're trying to avoid duplicate debug info, we may not have
22352 emitted the member decl for this function. Emit it now. */
22353 if (TYPE_STUB_DECL (type)
22354 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22355 && ! lookup_decl_die (member))
22356 {
22357 dw_die_ref type_die;
22358 gcc_assert (!decl_ultimate_origin (member));
22359
22360 type_die = lookup_type_die_strip_naming_typedef (type);
22361 if (TREE_CODE (member) == FUNCTION_DECL)
22362 gen_subprogram_die (member, type_die);
22363 else if (TREE_CODE (member) == FIELD_DECL)
22364 {
22365 /* Ignore the nameless fields that are used to skip bits but handle
22366 C++ anonymous unions and structs. */
22367 if (DECL_NAME (member) != NULL_TREE
22368 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22369 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22370 {
22371 struct vlr_context vlr_ctx = {
22372 DECL_CONTEXT (member), /* struct_type */
22373 NULL_TREE /* variant_part_offset */
22374 };
22375 gen_type_die (member_declared_type (member), type_die);
22376 gen_field_die (member, &vlr_ctx, type_die);
22377 }
22378 }
22379 else
22380 gen_variable_die (member, NULL_TREE, type_die);
22381 }
22382 }
22383 \f
22384 /* Forward declare these functions, because they are mutually recursive
22385 with their set_block_* pairing functions. */
22386 static void set_decl_origin_self (tree);
22387
22388 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22389 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22390 that it points to the node itself, thus indicating that the node is its
22391 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22392 the given node is NULL, recursively descend the decl/block tree which
22393 it is the root of, and for each other ..._DECL or BLOCK node contained
22394 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22395 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22396 values to point to themselves. */
22397
22398 static void
22399 set_block_origin_self (tree stmt)
22400 {
22401 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22402 {
22403 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22404
22405 {
22406 tree local_decl;
22407
22408 for (local_decl = BLOCK_VARS (stmt);
22409 local_decl != NULL_TREE;
22410 local_decl = DECL_CHAIN (local_decl))
22411 /* Do not recurse on nested functions since the inlining status
22412 of parent and child can be different as per the DWARF spec. */
22413 if (TREE_CODE (local_decl) != FUNCTION_DECL
22414 && !DECL_EXTERNAL (local_decl))
22415 set_decl_origin_self (local_decl);
22416 }
22417
22418 {
22419 tree subblock;
22420
22421 for (subblock = BLOCK_SUBBLOCKS (stmt);
22422 subblock != NULL_TREE;
22423 subblock = BLOCK_CHAIN (subblock))
22424 set_block_origin_self (subblock); /* Recurse. */
22425 }
22426 }
22427 }
22428
22429 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22430 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22431 node so that it points to the node itself, thus indicating that the
22432 node represents its own (abstract) origin. Additionally, if the
22433 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22434 the decl/block tree of which the given node is the root, and for
22435 each other ..._DECL or BLOCK node contained therein whose
22436 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22437 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22438 point to themselves. */
22439
22440 static void
22441 set_decl_origin_self (tree decl)
22442 {
22443 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22444 {
22445 DECL_ABSTRACT_ORIGIN (decl) = decl;
22446 if (TREE_CODE (decl) == FUNCTION_DECL)
22447 {
22448 tree arg;
22449
22450 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22451 DECL_ABSTRACT_ORIGIN (arg) = arg;
22452 if (DECL_INITIAL (decl) != NULL_TREE
22453 && DECL_INITIAL (decl) != error_mark_node)
22454 set_block_origin_self (DECL_INITIAL (decl));
22455 }
22456 }
22457 }
22458 \f
22459 /* Mark the early DIE for DECL as the abstract instance. */
22460
22461 static void
22462 dwarf2out_abstract_function (tree decl)
22463 {
22464 dw_die_ref old_die;
22465
22466 /* Make sure we have the actual abstract inline, not a clone. */
22467 decl = DECL_ORIGIN (decl);
22468
22469 if (DECL_IGNORED_P (decl))
22470 return;
22471
22472 old_die = lookup_decl_die (decl);
22473 /* With early debug we always have an old DIE unless we are in LTO
22474 and the user compiled without debug info but linked with it. */
22475 if (in_lto_p && ! old_die)
22476 return;
22477 gcc_assert (old_die != NULL);
22478 if (get_AT (old_die, DW_AT_inline)
22479 || get_AT (old_die, DW_AT_abstract_origin))
22480 /* We've already generated the abstract instance. */
22481 return;
22482
22483 /* Go ahead and put DW_AT_inline on the DIE. */
22484 if (DECL_DECLARED_INLINE_P (decl))
22485 {
22486 if (cgraph_function_possibly_inlined_p (decl))
22487 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22488 else
22489 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22490 }
22491 else
22492 {
22493 if (cgraph_function_possibly_inlined_p (decl))
22494 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22495 else
22496 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22497 }
22498
22499 if (DECL_DECLARED_INLINE_P (decl)
22500 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22501 add_AT_flag (old_die, DW_AT_artificial, 1);
22502
22503 set_decl_origin_self (decl);
22504 }
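/* Illustrative sketch (the function name is made up): for

     inline int add (int a, int b) { return a + b; }

   that the inliner actually inlined somewhere, the early DIE for 'add'
   is annotated here with DW_AT_inline = DW_INL_declared_inlined and
   becomes the abstract instance which concrete and inlined instances
   later reference through DW_AT_abstract_origin.  */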
22505
22506 /* Helper function of premark_used_types() which gets called through
22507 hash_table::traverse.
22508
22509 Marks the DIE of the given TYPE as perennial, so it never gets
22510 marked as unused by prune_unused_types. */
22511
22512 bool
22513 premark_used_types_helper (tree const &type, void *)
22514 {
22515 dw_die_ref die;
22516
22517 die = lookup_type_die (type);
22518 if (die != NULL)
22519 die->die_perennial_p = 1;
22520 return true;
22521 }
22522
22523 /* Helper function of premark_types_used_by_global_vars which gets called
22524 through hash_table::traverse.
22525
22526 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22527 marked as unused by prune_unused_types. The DIE of the type is marked
22528 only if the global variable using the type will actually be emitted. */
22529
22530 int
22531 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22532 void *)
22533 {
22534 struct types_used_by_vars_entry *entry;
22535 dw_die_ref die;
22536
22537 entry = (struct types_used_by_vars_entry *) *slot;
22538 gcc_assert (entry->type != NULL
22539 && entry->var_decl != NULL);
22540 die = lookup_type_die (entry->type);
22541 if (die)
22542 {
22543 /* Ask cgraph if the global variable really is to be emitted.
22544 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22545 varpool_node *node = varpool_node::get (entry->var_decl);
22546 if (node && node->definition)
22547 {
22548 die->die_perennial_p = 1;
22549 /* Keep the parent DIEs as well. */
22550 while ((die = die->die_parent) && die->die_perennial_p == 0)
22551 die->die_perennial_p = 1;
22552 }
22553 }
22554 return 1;
22555 }
22556
22557 /* Mark all members of used_types_hash as perennial. */
22558
22559 static void
22560 premark_used_types (struct function *fun)
22561 {
22562 if (fun && fun->used_types_hash)
22563 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22564 }
22565
22566 /* Mark all members of types_used_by_vars_entry as perennial. */
22567
22568 static void
22569 premark_types_used_by_global_vars (void)
22570 {
22571 if (types_used_by_vars_hash)
22572 types_used_by_vars_hash
22573 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22574 }
22575
22576 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22577 for CA_LOC call arg loc node. */
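/* For instance (a sketch, names made up): for a call

     bar (x);

   somewhere inside foo (), the DIE built below is a DW_TAG_call_site
   child of the innermost enclosing block's DIE (or of foo's subprogram
   DIE when no block DIE exists), carrying DW_AT_call_return_pc for the
   label recorded after the call and, when the callee is statically
   known, DW_AT_call_origin pointing at bar's DIE.  */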
22578
22579 static dw_die_ref
22580 gen_call_site_die (tree decl, dw_die_ref subr_die,
22581 struct call_arg_loc_node *ca_loc)
22582 {
22583 dw_die_ref stmt_die = NULL, die;
22584 tree block = ca_loc->block;
22585
22586 while (block
22587 && block != DECL_INITIAL (decl)
22588 && TREE_CODE (block) == BLOCK)
22589 {
22590 stmt_die = BLOCK_DIE (block);
22591 if (stmt_die)
22592 break;
22593 block = BLOCK_SUPERCONTEXT (block);
22594 }
22595 if (stmt_die == NULL)
22596 stmt_die = subr_die;
22597 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22598 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22599 if (ca_loc->tail_call_p)
22600 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22601 if (ca_loc->symbol_ref)
22602 {
22603 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22604 if (tdie)
22605 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22606 else
22607 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22608 false);
22609 }
22610 return die;
22611 }
22612
22613 /* Generate a DIE to represent a declared function (either file-scope or
22614 block-local). */
22615
22616 static void
22617 gen_subprogram_die (tree decl, dw_die_ref context_die)
22618 {
22619 tree origin = decl_ultimate_origin (decl);
22620 dw_die_ref subr_die;
22621 dw_die_ref old_die = lookup_decl_die (decl);
22622
22623 /* This function gets called multiple times for different stages of
22624 the debug process. For example, for func() in this code:
22625
22626 namespace S
22627 {
22628 void func() { ... }
22629 }
22630
22631 ...we get called 4 times. Twice in early debug and twice in
22632 late debug:
22633
22634 Early debug
22635 -----------
22636
22637 1. Once while generating func() within the namespace. This is
22638 the declaration. The declaration bit below is set, as the
22639 context is the namespace.
22640
22641 A new DIE will be generated with DW_AT_declaration set.
22642
22643 2. Once for func() itself. This is the specification. The
22644 declaration bit below is clear as the context is the CU.
22645
22646 We will use the cached DIE from (1) to create a new DIE with
22647 DW_AT_specification pointing to the declaration in (1).
22648
22649 Late debug via rest_of_handle_final()
22650 -------------------------------------
22651
22652 3. Once generating func() within the namespace. This is also the
22653 declaration, as in (1), but this time we will early exit below
22654 as we have a cached DIE and a declaration needs no additional
22655 annotations (no locations), as the source declaration line
22656 info is enough.
22657
22658 4. Once for func() itself. As in (2), this is the specification,
22659 but this time we will re-use the cached DIE, and just annotate
22660 it with the location information that should now be available.
22661
22662 For something without namespaces, but with abstract instances, we
22663 are also called multiple times:
22664
22665 class Base
22666 {
22667 public:
22668 Base (); // constructor declaration (1)
22669 };
22670
22671 Base::Base () { } // constructor specification (2)
22672
22673 Early debug
22674 -----------
22675
22676 1. Once for the Base() constructor by virtue of it being a
22677 member of the Base class. This is done via
22678 rest_of_type_compilation.
22679
22680 This is a declaration, so a new DIE will be created with
22681 DW_AT_declaration.
22682
22683 2. Once for the Base() constructor definition, but this time
22684 while generating the abstract instance of the base
22685 constructor (__base_ctor) which is being generated via early
22686 debug of reachable functions.
22687
22688 Even though we have a cached version of the declaration (1),
22689 we will create a DW_AT_specification of the declaration DIE
22690 in (1).
22691
22692 3. Once for the __base_ctor itself, but this time, we generate
22693 a DW_AT_abstract_origin version of the DW_AT_specification in
22694 (2).
22695
22696 Late debug via rest_of_handle_final
22697 -----------------------------------
22698
22699 4. One final time for the __base_ctor (which will have a cached
22700 DIE with DW_AT_abstract_origin created in (3). This time,
22701 we will just annotate the location information now
22702 available.
22703 */
22704 int declaration = (current_function_decl != decl
22705 || class_or_namespace_scope_p (context_die));
22706
22707 /* A declaration that has been previously dumped needs no
22708 additional information. */
22709 if (old_die && declaration)
22710 return;
22711
22712 /* Now that the C++ front end lazily declares artificial member fns, we
22713 might need to retrofit the declaration into its class. */
22714 if (!declaration && !origin && !old_die
22715 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22716 && !class_or_namespace_scope_p (context_die)
22717 && debug_info_level > DINFO_LEVEL_TERSE)
22718 old_die = force_decl_die (decl);
22719
22720 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22721 if (origin != NULL)
22722 {
22723 gcc_assert (!declaration || local_scope_p (context_die));
22724
22725 /* Fixup die_parent for the abstract instance of a nested
22726 inline function. */
22727 if (old_die && old_die->die_parent == NULL)
22728 add_child_die (context_die, old_die);
22729
22730 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22731 {
22732 /* If we have a DW_AT_abstract_origin we have a working
22733 cached version. */
22734 subr_die = old_die;
22735 }
22736 else
22737 {
22738 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22739 add_abstract_origin_attribute (subr_die, origin);
22740 /* This is where the actual code for a cloned function is.
22741 Let's emit a linkage name attribute for it. This helps
22742 debuggers to, e.g., set breakpoints into
22743 constructors/destructors when the user asks "break
22744 K::K". */
22745 add_linkage_name (subr_die, decl);
22746 }
22747 }
22748 /* A cached copy, possibly from early dwarf generation. Reuse as
22749 much as possible. */
22750 else if (old_die)
22751 {
22752 if (!get_AT_flag (old_die, DW_AT_declaration)
22753 /* We can have a normal definition following an inline one in the
22754 case of redefinition of GNU C extern inlines.
22755 It seems reasonable to use AT_specification in this case. */
22756 && !get_AT (old_die, DW_AT_inline))
22757 {
22758 /* Detect and ignore this case, where we are trying to output
22759 something we have already output. */
22760 if (get_AT (old_die, DW_AT_low_pc)
22761 || get_AT (old_die, DW_AT_ranges))
22762 return;
22763
22764 /* If we have no location information, this must be a
22765 partially generated DIE from early dwarf generation.
22766 Fall through and generate it. */
22767 }
22768
22769 /* If the definition comes from the same place as the declaration,
22770 maybe use the old DIE. We always want the DIE for this function
22771 that has the *_pc attributes to be under comp_unit_die so the
22772 debugger can find it. We also need to do this for abstract
22773 instances of inlines, since the spec requires the out-of-line copy
22774 to have the same parent. For local class methods, this doesn't
22775 apply; we just use the old DIE. */
22776 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22777 struct dwarf_file_data * file_index = lookup_filename (s.file);
22778 if (((is_unit_die (old_die->die_parent)
22779 /* This condition fixes the inconsistency/ICE with the
22780 following Fortran test (or some derivative thereof) while
22781 building libgfortran:
22782
22783 module some_m
22784 contains
22785 logical function funky (FLAG)
22786 funky = .true.
22787 end function
22788 end module
22789 */
22790 || (old_die->die_parent
22791 && old_die->die_parent->die_tag == DW_TAG_module)
22792 || local_scope_p (old_die->die_parent)
22793 || context_die == NULL)
22794 && (DECL_ARTIFICIAL (decl)
22795 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22796 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22797 == (unsigned) s.line)
22798 && (!debug_column_info
22799 || s.column == 0
22800 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22801 == (unsigned) s.column)))))
22802 /* With LTO if there's an abstract instance for
22803 the old DIE, this is a concrete instance and
22804 thus re-use the DIE. */
22805 || get_AT (old_die, DW_AT_abstract_origin))
22806 {
22807 subr_die = old_die;
22808
22809 /* Clear out the declaration attribute, but leave the
22810 parameters so they can be augmented with location
22811 information later. Unless this was a declaration, in
22812 which case, wipe out the nameless parameters and recreate
22813 them further down. */
22814 if (remove_AT (subr_die, DW_AT_declaration))
22815 {
22817 remove_AT (subr_die, DW_AT_object_pointer);
22818 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22819 }
22820 }
22821 /* Make a specification pointing to the previously built
22822 declaration. */
22823 else
22824 {
22825 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22826 add_AT_specification (subr_die, old_die);
22827 add_pubname (decl, subr_die);
22828 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22829 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22830 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22831 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22832 if (debug_column_info
22833 && s.column
22834 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22835 != (unsigned) s.column))
22836 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22837
22838 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22839 emit the real type on the definition die. */
22840 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22841 {
22842 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22843 if (die == auto_die || die == decltype_auto_die)
22844 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22845 TYPE_UNQUALIFIED, false, context_die);
22846 }
22847
22848 /* When we process the method declaration, we haven't seen
22849 the out-of-class defaulted definition yet, so we have to
22850 recheck now. */
22851 if ((dwarf_version >= 5 || ! dwarf_strict)
22852 && !get_AT (subr_die, DW_AT_defaulted))
22853 {
22854 int defaulted
22855 = lang_hooks.decls.decl_dwarf_attribute (decl,
22856 DW_AT_defaulted);
22857 if (defaulted != -1)
22858 {
22859 /* Other values must have been handled before. */
22860 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22861 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22862 }
22863 }
22864 }
22865 }
22866 /* Create a fresh DIE for anything else. */
22867 else
22868 {
22869 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22870
22871 if (TREE_PUBLIC (decl))
22872 add_AT_flag (subr_die, DW_AT_external, 1);
22873
22874 add_name_and_src_coords_attributes (subr_die, decl);
22875 add_pubname (decl, subr_die);
22876 if (debug_info_level > DINFO_LEVEL_TERSE)
22877 {
22878 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22879 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22880 TYPE_UNQUALIFIED, false, context_die);
22881 }
22882
22883 add_pure_or_virtual_attribute (subr_die, decl);
22884 if (DECL_ARTIFICIAL (decl))
22885 add_AT_flag (subr_die, DW_AT_artificial, 1);
22886
22887 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22888 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22889
22890 add_alignment_attribute (subr_die, decl);
22891
22892 add_accessibility_attribute (subr_die, decl);
22893 }
22894
22895 /* Unless we have an existing non-declaration DIE, equate the new
22896 DIE. */
22897 if (!old_die || is_declaration_die (old_die))
22898 equate_decl_number_to_die (decl, subr_die);
22899
22900 if (declaration)
22901 {
22902 if (!old_die || !get_AT (old_die, DW_AT_inline))
22903 {
22904 add_AT_flag (subr_die, DW_AT_declaration, 1);
22905
22906 /* If this is an explicit function declaration then generate
22907 a DW_AT_explicit attribute. */
22908 if ((dwarf_version >= 3 || !dwarf_strict)
22909 && lang_hooks.decls.decl_dwarf_attribute (decl,
22910 DW_AT_explicit) == 1)
22911 add_AT_flag (subr_die, DW_AT_explicit, 1);
22912
22913 /* If this is a C++11 deleted special function member then generate
22914 a DW_AT_deleted attribute. */
22915 if ((dwarf_version >= 5 || !dwarf_strict)
22916 && lang_hooks.decls.decl_dwarf_attribute (decl,
22917 DW_AT_deleted) == 1)
22918 add_AT_flag (subr_die, DW_AT_deleted, 1);
22919
22920 /* If this is a C++11 defaulted special function member then
22921 generate a DW_AT_defaulted attribute. */
22922 if (dwarf_version >= 5 || !dwarf_strict)
22923 {
22924 int defaulted
22925 = lang_hooks.decls.decl_dwarf_attribute (decl,
22926 DW_AT_defaulted);
22927 if (defaulted != -1)
22928 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22929 }
22930
22931 /* If this is a C++11 non-static member function with & ref-qualifier
22932 then generate a DW_AT_reference attribute. */
22933 if ((dwarf_version >= 5 || !dwarf_strict)
22934 && lang_hooks.decls.decl_dwarf_attribute (decl,
22935 DW_AT_reference) == 1)
22936 add_AT_flag (subr_die, DW_AT_reference, 1);
22937
22938 /* If this is a C++11 non-static member function with &&
22939 ref-qualifier then generate a DW_AT_reference attribute. */
22940 if ((dwarf_version >= 5 || !dwarf_strict)
22941 && lang_hooks.decls.decl_dwarf_attribute (decl,
22942 DW_AT_rvalue_reference)
22943 == 1)
22944 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22945 }
22946 }
22947 /* For non DECL_EXTERNALs, if range information is available, fill
22948 the DIE with it. */
22949 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22950 {
22951 HOST_WIDE_INT cfa_fb_offset;
22952
22953 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22954
22955 if (!crtl->has_bb_partition)
22956 {
22957 dw_fde_ref fde = fun->fde;
22958 if (fde->dw_fde_begin)
22959 {
22960 /* We have already generated the labels. */
22961 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22962 fde->dw_fde_end, false);
22963 }
22964 else
22965 {
22966 /* Create start/end labels and add the range. */
22967 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22968 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22969 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22970 current_function_funcdef_no);
22971 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22972 current_function_funcdef_no);
22973 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22974 false);
22975 }
22976
22977 #if VMS_DEBUGGING_INFO
22978 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22979 Section 2.3 Prologue and Epilogue Attributes:
22980 When a breakpoint is set on entry to a function, it is generally
22981 desirable for execution to be suspended, not on the very first
22982 instruction of the function, but rather at a point after the
22983 function's frame has been set up, after any language defined local
22984 declaration processing has been completed, and before execution of
22985 the first statement of the function begins. Debuggers generally
22986 cannot properly determine where this point is. Similarly for a
22987 breakpoint set on exit from a function. The prologue and epilogue
22988 attributes allow a compiler to communicate the location(s) to use. */
22989
22990 {
22991 if (fde->dw_fde_vms_end_prologue)
22992 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22993 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22994
22995 if (fde->dw_fde_vms_begin_epilogue)
22996 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22997 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22998 }
22999 #endif
23000
23001 }
23002 else
23003 {
23004 /* Generate pubnames entries for the split function code ranges. */
23005 dw_fde_ref fde = fun->fde;
23006
23007 if (fde->dw_fde_second_begin)
23008 {
23009 if (dwarf_version >= 3 || !dwarf_strict)
23010 {
23011 /* We should use ranges for non-contiguous code section
23012 addresses. Use the actual code range for the initial
23013 section, since the HOT/COLD labels might precede an
23014 alignment offset. */
23015 bool range_list_added = false;
23016 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23017 fde->dw_fde_end, &range_list_added,
23018 false);
23019 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23020 fde->dw_fde_second_end,
23021 &range_list_added, false);
23022 if (range_list_added)
23023 add_ranges (NULL);
23024 }
23025 else
23026 {
23027 /* There is no real support in DW2 for this ... so we make
23028 a work-around. First, emit the pub name for the segment
23029 containing the function label. Then make and emit a
23030 simplified subprogram DIE for the second segment with the
23031 name prefixed by __second_sect_of_. We use the same
23032 linkage name for the second DIE so that gdb will find both
23033 sections when given "b foo". */
23034 const char *name = NULL;
23035 tree decl_name = DECL_NAME (decl);
23036 dw_die_ref seg_die;
23037
23038 /* Do the 'primary' section. */
23039 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23040 fde->dw_fde_end, false);
23041
23042 /* Build a minimal DIE for the secondary section. */
23043 seg_die = new_die (DW_TAG_subprogram,
23044 subr_die->die_parent, decl);
23045
23046 if (TREE_PUBLIC (decl))
23047 add_AT_flag (seg_die, DW_AT_external, 1);
23048
23049 if (decl_name != NULL
23050 && IDENTIFIER_POINTER (decl_name) != NULL)
23051 {
23052 name = dwarf2_name (decl, 1);
23053 if (! DECL_ARTIFICIAL (decl))
23054 add_src_coords_attributes (seg_die, decl);
23055
23056 add_linkage_name (seg_die, decl);
23057 }
23058 gcc_assert (name != NULL);
23059 add_pure_or_virtual_attribute (seg_die, decl);
23060 if (DECL_ARTIFICIAL (decl))
23061 add_AT_flag (seg_die, DW_AT_artificial, 1);
23062
23063 name = concat ("__second_sect_of_", name, NULL);
23064 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23065 fde->dw_fde_second_end, false);
23066 add_name_attribute (seg_die, name);
23067 if (want_pubnames ())
23068 add_pubname_string (name, seg_die);
23069 }
23070 }
23071 else
23072 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23073 false);
23074 }
23075
23076 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23077
23078 /* We define the "frame base" as the function's CFA. This is more
23079 convenient for several reasons: (1) It's stable across the prologue
23080 and epilogue, which makes it better than just a frame pointer,
23081 (2) With dwarf3, there exists a one-byte encoding that allows us
23082 to reference the .debug_frame data by proxy, but failing that,
23083 (3) We can at least reuse the code inspection and interpretation
23084 code that determines the CFA position at various points in the
23085 function. */
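/* Sketch of what the two branches below emit (illustrative): with
   DWARF 3+ and DWARF2-style unwind info the frame base is the single
   one-byte expression

     DW_AT_frame_base: DW_OP_call_frame_cfa

   while the fallback path builds a location list (or a single
   expression) describing where the CFA lives throughout the
   function.  */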
23086 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23087 {
23088 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23089 add_AT_loc (subr_die, DW_AT_frame_base, op);
23090 }
23091 else
23092 {
23093 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23094 if (list->dw_loc_next)
23095 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23096 else
23097 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23098 }
23099
23100 /* Compute a displacement from the "steady-state frame pointer" to
23101 the CFA. The former is what all stack slots and argument slots
23102 will reference in the rtl; the latter is what we've told the
23103 debugger about. We'll need to adjust all frame_base references
23104 by this displacement. */
23105 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23106
23107 if (fun->static_chain_decl)
23108 {
23109 /* DWARF requires here a location expression that computes the
23110 address of the enclosing subprogram's frame base. The machinery
23111 in tree-nested.c is supposed to store this specific address in the
23112 last field of the FRAME record. */
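/* For instance (a sketch using a GNU C nested function):

     int outer (int n)
     {
       int inner (void) { return n; }
       return inner ();
     }

   tree-nested.c gives outer's FRAME record a trailing field holding
   outer's frame base; the expression built below dereferences inner's
   static chain and fetches that field, and is attached to inner's DIE
   as DW_AT_static_link.  */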
23113 const tree frame_type
23114 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23115 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23116
23117 tree fb_expr
23118 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23119 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23120 fb_expr, fb_decl, NULL_TREE);
23121
23122 add_AT_location_description (subr_die, DW_AT_static_link,
23123 loc_list_from_tree (fb_expr, 0, NULL));
23124 }
23125
23126 resolve_variable_values ();
23127 }
23128
23129 /* Generate child DIEs for template parameters. */
23130 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23131 gen_generic_params_dies (decl);
23132
23133 /* Now output descriptions of the arguments for this function. This gets
23134 (unnecessarily?) complex because the DECL_ARGUMENTS list
23135 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23136 `...' at the end of the formal parameter list. In order to find out if
23137 there was a trailing ellipsis or not, we must instead look at the type
23138 associated with the FUNCTION_DECL. This will be a node of type
23139 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23140 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23141 an ellipsis at the end. */
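/* A small sketch of the distinction (illustrative declarations):

     int f (int a);        TYPE_ARG_TYPES ends with void_type_node
     int g (int a, ...);   TYPE_ARG_TYPES does not end with void_type_node
     int h ();             unprototyped; no parameter info at all

   so the trailing ellipsis and the unprototyped cases are recognized
   from the FUNCTION_TYPE rather than from DECL_ARGUMENTS.  */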
23142
23143 /* In the case where we are describing a mere function declaration, all we
23144 need to do here (and all we *can* do here) is to describe the *types* of
23145 its formal parameters. */
23146 if (debug_info_level <= DINFO_LEVEL_TERSE)
23147 ;
23148 else if (declaration)
23149 gen_formal_types_die (decl, subr_die);
23150 else
23151 {
23152 /* Generate DIEs to represent all known formal parameters. */
23153 tree parm = DECL_ARGUMENTS (decl);
23154 tree generic_decl = early_dwarf
23155 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23156 tree generic_decl_parm = generic_decl
23157 ? DECL_ARGUMENTS (generic_decl)
23158 : NULL;
23159
23160 /* Now we want to walk the list of parameters of the function and
23161 emit their relevant DIEs.
23162
23163 We consider the case of DECL being an instance of a generic function
23164 as well as it being a normal function.
23165
23166 If DECL is an instance of a generic function we walk the
23167 parameters of the generic function declaration _and_ the parameters of
23168 DECL itself. This is useful because we want to emit specific DIEs for
23169 function parameter packs and those are declared as part of the
23170 generic function declaration. In that particular case,
23171 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23172 That DIE has children DIEs representing the set of arguments
23173 of the pack. Note that the set of pack arguments can be empty.
23174 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have
23175 any child DIEs.
23176
23177 Otherwise, we just consider the parameters of DECL. */
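/* Illustrative C++ example (names made up):

     template <typename... T> void f (int n, T... rest) { }
     f (1, 2.0, 'c');

   the pack 'rest' in the generic declaration yields a
   DW_TAG_GNU_formal_parameter_pack DIE whose children describe the
   instantiated pack arguments (here a double and a char); for f (1)
   that pack DIE would simply have no children.  */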
23178 while (generic_decl_parm || parm)
23179 {
23180 if (generic_decl_parm
23181 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23182 gen_formal_parameter_pack_die (generic_decl_parm,
23183 parm, subr_die,
23184 &parm);
23185 else if (parm)
23186 {
23187 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23188
23189 if (early_dwarf
23190 && parm == DECL_ARGUMENTS (decl)
23191 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23192 && parm_die
23193 && (dwarf_version >= 3 || !dwarf_strict))
23194 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23195
23196 parm = DECL_CHAIN (parm);
23197 }
23198 else if (parm)
23199 parm = DECL_CHAIN (parm);
23200
23201 if (generic_decl_parm)
23202 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23203 }
23204
23205 /* Decide whether we need an unspecified_parameters DIE at the end.
23206 There are two cases to do this for: 1) the ANSI `...' declaration,
23207 which is detectable when the end of the arg list is not a
23208 void_type_node; 2) an unprototyped function declaration (not a
23209 definition), which just means that we have no info about the
23210 parameters at all.
23211 if (early_dwarf)
23212 {
23213 if (prototype_p (TREE_TYPE (decl)))
23214 {
23215 /* This is the prototyped case, check for.... */
23216 if (stdarg_p (TREE_TYPE (decl)))
23217 gen_unspecified_parameters_die (decl, subr_die);
23218 }
23219 else if (DECL_INITIAL (decl) == NULL_TREE)
23220 gen_unspecified_parameters_die (decl, subr_die);
23221 }
23222 }
23223
23224 if (subr_die != old_die)
23225 /* Add the calling convention attribute if requested. */
23226 add_calling_convention_attribute (subr_die, decl);
23227
23228 /* Output Dwarf info for all of the stuff within the body of the function
23229 (if it has one - it may be just a declaration).
23230
23231 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23232 a function. This BLOCK actually represents the outermost binding contour
23233 for the function, i.e. the contour in which the function's formal
23234 parameters and labels get declared. Curiously, it appears that the front
23235 end doesn't actually put the PARM_DECL nodes for the current function onto
23236 the BLOCK_VARS list for this outer scope; instead they are strung off
23237 of the DECL_ARGUMENTS list for the function.
23238
23239 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23240 the LABEL_DECL nodes for the function however, and we output DWARF info
23241 for those in decls_for_scope. Just within the `outer_scope' there will be
23242 a BLOCK node representing the function's outermost pair of curly braces,
23243 and any blocks used for the base and member initializers of a C++
23244 constructor function. */
23245 tree outer_scope = DECL_INITIAL (decl);
23246 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23247 {
23248 int call_site_note_count = 0;
23249 int tail_call_site_note_count = 0;
23250
23251 /* Emit a DW_TAG_variable DIE for a named return value. */
23252 if (DECL_NAME (DECL_RESULT (decl)))
23253 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23254
23255 /* The first time through decls_for_scope we will generate the
23256 DIEs for the locals. The second time, we fill in the
23257 location info. */
23258 decls_for_scope (outer_scope, subr_die);
23259
23260 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23261 {
23262 struct call_arg_loc_node *ca_loc;
23263 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23264 {
23265 dw_die_ref die = NULL;
23266 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23267 rtx arg, next_arg;
23268
23269 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23270 ? XEXP (ca_loc->call_arg_loc_note, 0)
23271 : NULL_RTX);
23272 arg; arg = next_arg)
23273 {
23274 dw_loc_descr_ref reg, val;
23275 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23276 dw_die_ref cdie, tdie = NULL;
23277
23278 next_arg = XEXP (arg, 1);
23279 if (REG_P (XEXP (XEXP (arg, 0), 0))
23280 && next_arg
23281 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23282 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23283 && REGNO (XEXP (XEXP (arg, 0), 0))
23284 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23285 next_arg = XEXP (next_arg, 1);
23286 if (mode == VOIDmode)
23287 {
23288 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23289 if (mode == VOIDmode)
23290 mode = GET_MODE (XEXP (arg, 0));
23291 }
23292 if (mode == VOIDmode || mode == BLKmode)
23293 continue;
23294 /* Get dynamic information about call target only if we
23295 have no static information: we cannot generate both
23296 DW_AT_call_origin and DW_AT_call_target
23297 attributes. */
23298 if (ca_loc->symbol_ref == NULL_RTX)
23299 {
23300 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23301 {
23302 tloc = XEXP (XEXP (arg, 0), 1);
23303 continue;
23304 }
23305 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23306 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23307 {
23308 tlocc = XEXP (XEXP (arg, 0), 1);
23309 continue;
23310 }
23311 }
23312 reg = NULL;
23313 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23314 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23315 VAR_INIT_STATUS_INITIALIZED);
23316 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23317 {
23318 rtx mem = XEXP (XEXP (arg, 0), 0);
23319 reg = mem_loc_descriptor (XEXP (mem, 0),
23320 get_address_mode (mem),
23321 GET_MODE (mem),
23322 VAR_INIT_STATUS_INITIALIZED);
23323 }
23324 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23325 == DEBUG_PARAMETER_REF)
23326 {
23327 tree tdecl
23328 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23329 tdie = lookup_decl_die (tdecl);
23330 if (tdie == NULL)
23331 continue;
23332 }
23333 else
23334 continue;
23335 if (reg == NULL
23336 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23337 != DEBUG_PARAMETER_REF)
23338 continue;
23339 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23340 VOIDmode,
23341 VAR_INIT_STATUS_INITIALIZED);
23342 if (val == NULL)
23343 continue;
23344 if (die == NULL)
23345 die = gen_call_site_die (decl, subr_die, ca_loc);
23346 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23347 NULL_TREE);
23348 if (reg != NULL)
23349 add_AT_loc (cdie, DW_AT_location, reg);
23350 else if (tdie != NULL)
23351 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23352 tdie);
23353 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23354 if (next_arg != XEXP (arg, 1))
23355 {
23356 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23357 if (mode == VOIDmode)
23358 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23359 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23360 0), 1),
23361 mode, VOIDmode,
23362 VAR_INIT_STATUS_INITIALIZED);
23363 if (val != NULL)
23364 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23365 val);
23366 }
23367 }
23368 if (die == NULL
23369 && (ca_loc->symbol_ref || tloc))
23370 die = gen_call_site_die (decl, subr_die, ca_loc);
23371 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23372 {
23373 dw_loc_descr_ref tval = NULL;
23374
23375 if (tloc != NULL_RTX)
23376 tval = mem_loc_descriptor (tloc,
23377 GET_MODE (tloc) == VOIDmode
23378 ? Pmode : GET_MODE (tloc),
23379 VOIDmode,
23380 VAR_INIT_STATUS_INITIALIZED);
23381 if (tval)
23382 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23383 else if (tlocc != NULL_RTX)
23384 {
23385 tval = mem_loc_descriptor (tlocc,
23386 GET_MODE (tlocc) == VOIDmode
23387 ? Pmode : GET_MODE (tlocc),
23388 VOIDmode,
23389 VAR_INIT_STATUS_INITIALIZED);
23390 if (tval)
23391 add_AT_loc (die,
23392 dwarf_AT (DW_AT_call_target_clobbered),
23393 tval);
23394 }
23395 }
23396 if (die != NULL)
23397 {
23398 call_site_note_count++;
23399 if (ca_loc->tail_call_p)
23400 tail_call_site_note_count++;
23401 }
23402 }
23403 }
23404 call_arg_locations = NULL;
23405 call_arg_loc_last = NULL;
23406 if (tail_call_site_count >= 0
23407 && tail_call_site_count == tail_call_site_note_count
23408 && (!dwarf_strict || dwarf_version >= 5))
23409 {
23410 if (call_site_count >= 0
23411 && call_site_count == call_site_note_count)
23412 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23413 else
23414 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23415 }
23416 call_site_count = -1;
23417 tail_call_site_count = -1;
23418 }
23419
23420 /* Mark used types after we have created DIEs for the function's scopes. */
23421 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23422 }
23423
23424 /* Returns a hash value for X (which really is a die_struct). */
23425
23426 hashval_t
23427 block_die_hasher::hash (die_struct *d)
23428 {
23429 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23430 }
23431
23432 /* Return nonzero if decl_id and die_parent of die_struct X are the same
23433 as decl_id and die_parent of die_struct Y. */
23434
23435 bool
23436 block_die_hasher::equal (die_struct *x, die_struct *y)
23437 {
23438 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23439 }
23440
23441 /* Hold information about markers for inlined entry points. */
23442 struct GTY ((for_user)) inline_entry_data
23443 {
23444 /* The block that's the inlined_function_outer_scope for an inlined
23445 function. */
23446 tree block;
23447
23448 /* The label at the inlined entry point. */
23449 const char *label_pfx;
23450 unsigned int label_num;
23451
23452 /* The view number to be used as the inlined entry point. */
23453 var_loc_view view;
23454 };
23455
23456 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23457 {
23458 typedef tree compare_type;
23459 static inline hashval_t hash (const inline_entry_data *);
23460 static inline bool equal (const inline_entry_data *, const_tree);
23461 };
23462
23463 /* Hash table routines for inline_entry_data. */
23464
23465 inline hashval_t
23466 inline_entry_data_hasher::hash (const inline_entry_data *data)
23467 {
23468 return htab_hash_pointer (data->block);
23469 }
23470
23471 inline bool
23472 inline_entry_data_hasher::equal (const inline_entry_data *data,
23473 const_tree block)
23474 {
23475 return data->block == block;
23476 }
23477
23478 /* Inlined entry points pending DIE creation in this compilation unit. */
23479
23480 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23481
23482
23483 /* Return TRUE if DECL, which may have been previously generated as
23484 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23485 true if decl (or its origin) is either an extern declaration or a
23486 class/namespace scoped declaration.
23487
23488 The declare_in_namespace support causes us to get two DIEs for one
23489 variable, both of which are declarations. We want to avoid
23490 considering one to be a specification, so we must test for
23491 DECLARATION and DW_AT_declaration. */
23492 static inline bool
23493 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23494 {
23495 return (old_die && TREE_STATIC (decl) && !declaration
23496 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23497 }
23498
23499 /* Return true if DECL is a local static. */
23500
23501 static inline bool
23502 local_function_static (tree decl)
23503 {
23504 gcc_assert (VAR_P (decl));
23505 return TREE_STATIC (decl)
23506 && DECL_CONTEXT (decl)
23507 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23508 }
23509
23510 /* Generate a DIE to represent a declared data object.
23511 Either DECL or ORIGIN must be non-null. */
23512
23513 static void
23514 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23515 {
23516 HOST_WIDE_INT off = 0;
23517 tree com_decl;
23518 tree decl_or_origin = decl ? decl : origin;
23519 tree ultimate_origin;
23520 dw_die_ref var_die;
23521 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23522 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23523 || class_or_namespace_scope_p (context_die));
23524 bool specialization_p = false;
23525 bool no_linkage_name = false;
23526
23527 /* While C++ inline static data members have definitions inside of the
23528 class, force the first DIE to be a declaration, then let gen_member_die
23529 reparent it to the class context and call gen_variable_die again
23530 to create the outside of the class DIE for the definition. */
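/* E.g. (a sketch, assuming C++17):

     struct S { static inline int counter = 0; };

   the in-class definition is first forced into a declaration DIE here;
   gen_member_die later reparents it into S's DIE and calls
   gen_variable_die again to build the out-of-class definition DIE.  */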
23531 if (!declaration
23532 && old_die == NULL
23533 && decl
23534 && DECL_CONTEXT (decl)
23535 && TYPE_P (DECL_CONTEXT (decl))
23536 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23537 {
23538 declaration = true;
23539 if (dwarf_version < 5)
23540 no_linkage_name = true;
23541 }
23542
23543 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23544 if (decl || ultimate_origin)
23545 origin = ultimate_origin;
23546 com_decl = fortran_common (decl_or_origin, &off);
23547
23548 /* Symbol in common gets emitted as a child of the common block, in the form
23549 of a data member. */
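/* For instance (a Fortran sketch):

     integer a, b
     common /blk/ a, b

   'a' and 'b' end up as DW_TAG_variable children of the
   DW_TAG_common_block DIE for 'blk', with each DW_AT_location computed
   from the common block's address plus the symbol's offset.  */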
23550 if (com_decl)
23551 {
23552 dw_die_ref com_die;
23553 dw_loc_list_ref loc = NULL;
23554 die_node com_die_arg;
23555
23556 var_die = lookup_decl_die (decl_or_origin);
23557 if (var_die)
23558 {
23559 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23560 {
23561 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23562 if (loc)
23563 {
23564 if (off)
23565 {
23566 /* Optimize the common case. */
23567 if (single_element_loc_list_p (loc)
23568 && loc->expr->dw_loc_opc == DW_OP_addr
23569 && loc->expr->dw_loc_next == NULL
23570 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23571 == SYMBOL_REF)
23572 {
23573 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23574 loc->expr->dw_loc_oprnd1.v.val_addr
23575 = plus_constant (GET_MODE (x), x , off);
23576 }
23577 else
23578 loc_list_plus_const (loc, off);
23579 }
23580 add_AT_location_description (var_die, DW_AT_location, loc);
23581 remove_AT (var_die, DW_AT_declaration);
23582 }
23583 }
23584 return;
23585 }
23586
23587 if (common_block_die_table == NULL)
23588 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23589
23590 com_die_arg.decl_id = DECL_UID (com_decl);
23591 com_die_arg.die_parent = context_die;
23592 com_die = common_block_die_table->find (&com_die_arg);
23593 if (! early_dwarf)
23594 loc = loc_list_from_tree (com_decl, 2, NULL);
23595 if (com_die == NULL)
23596 {
23597 const char *cnam
23598 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23599 die_node **slot;
23600
23601 com_die = new_die (DW_TAG_common_block, context_die, decl);
23602 add_name_and_src_coords_attributes (com_die, com_decl);
23603 if (loc)
23604 {
23605 add_AT_location_description (com_die, DW_AT_location, loc);
23606 /* Avoid sharing the same loc descriptor between
23607 DW_TAG_common_block and DW_TAG_variable. */
23608 loc = loc_list_from_tree (com_decl, 2, NULL);
23609 }
23610 else if (DECL_EXTERNAL (decl_or_origin))
23611 add_AT_flag (com_die, DW_AT_declaration, 1);
23612 if (want_pubnames ())
23613 add_pubname_string (cnam, com_die); /* ??? needed? */
23614 com_die->decl_id = DECL_UID (com_decl);
23615 slot = common_block_die_table->find_slot (com_die, INSERT);
23616 *slot = com_die;
23617 }
23618 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23619 {
23620 add_AT_location_description (com_die, DW_AT_location, loc);
23621 loc = loc_list_from_tree (com_decl, 2, NULL);
23622 remove_AT (com_die, DW_AT_declaration);
23623 }
23624 var_die = new_die (DW_TAG_variable, com_die, decl);
23625 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23626 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23627 decl_quals (decl_or_origin), false,
23628 context_die);
23629 add_alignment_attribute (var_die, decl);
23630 add_AT_flag (var_die, DW_AT_external, 1);
23631 if (loc)
23632 {
23633 if (off)
23634 {
23635 /* Optimize the common case. */
23636 if (single_element_loc_list_p (loc)
23637 && loc->expr->dw_loc_opc == DW_OP_addr
23638 && loc->expr->dw_loc_next == NULL
23639 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23640 {
23641 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23642 loc->expr->dw_loc_oprnd1.v.val_addr
23643 = plus_constant (GET_MODE (x), x, off);
23644 }
23645 else
23646 loc_list_plus_const (loc, off);
23647 }
23648 add_AT_location_description (var_die, DW_AT_location, loc);
23649 }
23650 else if (DECL_EXTERNAL (decl_or_origin))
23651 add_AT_flag (var_die, DW_AT_declaration, 1);
23652 if (decl)
23653 equate_decl_number_to_die (decl, var_die);
23654 return;
23655 }
23656
23657 if (old_die)
23658 {
23659 if (declaration)
23660 {
23661 /* A declaration that has been previously dumped needs no
23662 further annotations, since it doesn't need location on
23663 the second pass. */
23664 return;
23665 }
23666 else if (decl_will_get_specification_p (old_die, decl, declaration)
23667 && !get_AT (old_die, DW_AT_specification))
23668 {
23669 /* Fall-thru so we can make a new variable die along with a
23670 DW_AT_specification. */
23671 }
23672 else if (origin && old_die->die_parent != context_die)
23673 {
23674 /* If we will be creating an inlined instance, we need a
23675 new DIE that will get annotated with
23676 DW_AT_abstract_origin. */
23677 gcc_assert (!DECL_ABSTRACT_P (decl));
23678 }
23679 else
23680 {
23681 /* If a DIE was dumped early, it still needs location info.
23682 Skip to where we fill the location bits. */
23683 var_die = old_die;
23684
23685 /* ??? In LTRANS we cannot annotate early created variably
23686 modified type DIEs without copying them and adjusting all
23687 references to them. Thus we dump them again. Also add a
23688 reference to them, but beware of a -g0 compile and -g link,
23689 in which case the reference will already be present. */
23690 tree type = TREE_TYPE (decl_or_origin);
23691 if (in_lto_p
23692 && ! get_AT (var_die, DW_AT_type)
23693 && variably_modified_type_p
23694 (type, decl_function_context (decl_or_origin)))
23695 {
23696 if (decl_by_reference_p (decl_or_origin))
23697 add_type_attribute (var_die, TREE_TYPE (type),
23698 TYPE_UNQUALIFIED, false, context_die);
23699 else
23700 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23701 false, context_die);
23702 }
23703
23704 goto gen_variable_die_location;
23705 }
23706 }
23707
23708 /* For static data members, the declaration in the class is supposed
23709 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23710 also in DWARF2; the specification should still be DW_TAG_variable
23711 referencing the DW_TAG_member DIE. */
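/* Sketch: given 'struct S { static int x; }; int S::x;', the in-class
   declaration gets DW_TAG_member below when targeting DWARF 2-4, while
   the out-of-class definition is a DW_TAG_variable whose
   DW_AT_specification points back at it; with DWARF 5 both use
   DW_TAG_variable.  */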
23712 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23713 var_die = new_die (DW_TAG_member, context_die, decl);
23714 else
23715 var_die = new_die (DW_TAG_variable, context_die, decl);
23716
23717 if (origin != NULL)
23718 add_abstract_origin_attribute (var_die, origin);
23719
23720 /* Loop unrolling can create multiple blocks that refer to the same
23721 static variable, so we must test for the DW_AT_declaration flag.
23722
23723 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23724 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23725 sharing them.
23726
23727 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23728 else if (decl_will_get_specification_p (old_die, decl, declaration))
23729 {
23730 /* This is a definition of a C++ class level static. */
23731 add_AT_specification (var_die, old_die);
23732 specialization_p = true;
23733 if (DECL_NAME (decl))
23734 {
23735 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23736 struct dwarf_file_data * file_index = lookup_filename (s.file);
23737
23738 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23739 add_AT_file (var_die, DW_AT_decl_file, file_index);
23740
23741 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23742 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23743
23744 if (debug_column_info
23745 && s.column
23746 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23747 != (unsigned) s.column))
23748 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23749
23750 if (old_die->die_tag == DW_TAG_member)
23751 add_linkage_name (var_die, decl);
23752 }
23753 }
23754 else
23755 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23756
23757 if ((origin == NULL && !specialization_p)
23758 || (origin != NULL
23759 && !DECL_ABSTRACT_P (decl_or_origin)
23760 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23761 decl_function_context
23762 (decl_or_origin))))
23763 {
23764 tree type = TREE_TYPE (decl_or_origin);
23765
23766 if (decl_by_reference_p (decl_or_origin))
23767 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23768 context_die);
23769 else
23770 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23771 context_die);
23772 }
23773
23774 if (origin == NULL && !specialization_p)
23775 {
23776 if (TREE_PUBLIC (decl))
23777 add_AT_flag (var_die, DW_AT_external, 1);
23778
23779 if (DECL_ARTIFICIAL (decl))
23780 add_AT_flag (var_die, DW_AT_artificial, 1);
23781
23782 add_alignment_attribute (var_die, decl);
23783
23784 add_accessibility_attribute (var_die, decl);
23785 }
23786
23787 if (declaration)
23788 add_AT_flag (var_die, DW_AT_declaration, 1);
23789
23790 if (decl && (DECL_ABSTRACT_P (decl)
23791 || !old_die || is_declaration_die (old_die)))
23792 equate_decl_number_to_die (decl, var_die);
23793
23794 gen_variable_die_location:
23795 if (! declaration
23796 && (! DECL_ABSTRACT_P (decl_or_origin)
23797 /* Local static vars are shared between all clones/inlines,
23798 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23799 already set. */
23800 || (VAR_P (decl_or_origin)
23801 && TREE_STATIC (decl_or_origin)
23802 && DECL_RTL_SET_P (decl_or_origin))))
23803 {
23804 if (early_dwarf)
23805 add_pubname (decl_or_origin, var_die);
23806 else
23807 add_location_or_const_value_attribute (var_die, decl_or_origin,
23808 decl == NULL);
23809 }
23810 else
23811 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23812
23813 if ((dwarf_version >= 4 || !dwarf_strict)
23814 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23815 DW_AT_const_expr) == 1
23816 && !get_AT (var_die, DW_AT_const_expr)
23817 && !specialization_p)
23818 add_AT_flag (var_die, DW_AT_const_expr, 1);
23819
23820 if (!dwarf_strict)
23821 {
23822 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23823 DW_AT_inline);
23824 if (inl != -1
23825 && !get_AT (var_die, DW_AT_inline)
23826 && !specialization_p)
23827 add_AT_unsigned (var_die, DW_AT_inline, inl);
23828 }
23829 }
23830
23831 /* Generate a DIE to represent a named constant. */
23832
23833 static void
23834 gen_const_die (tree decl, dw_die_ref context_die)
23835 {
23836 dw_die_ref const_die;
23837 tree type = TREE_TYPE (decl);
23838
23839 const_die = lookup_decl_die (decl);
23840 if (const_die)
23841 return;
23842
23843 const_die = new_die (DW_TAG_constant, context_die, decl);
23844 equate_decl_number_to_die (decl, const_die);
23845 add_name_and_src_coords_attributes (const_die, decl);
23846 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23847 if (TREE_PUBLIC (decl))
23848 add_AT_flag (const_die, DW_AT_external, 1);
23849 if (DECL_ARTIFICIAL (decl))
23850 add_AT_flag (const_die, DW_AT_artificial, 1);
23851 tree_add_const_value_attribute_for_decl (const_die, decl);
23852 }
23853
23854 /* Generate a DIE to represent a label identifier. */
23855
23856 static void
23857 gen_label_die (tree decl, dw_die_ref context_die)
23858 {
23859 tree origin = decl_ultimate_origin (decl);
23860 dw_die_ref lbl_die = lookup_decl_die (decl);
23861 rtx insn;
23862 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23863
23864 if (!lbl_die)
23865 {
23866 lbl_die = new_die (DW_TAG_label, context_die, decl);
23867 equate_decl_number_to_die (decl, lbl_die);
23868
23869 if (origin != NULL)
23870 add_abstract_origin_attribute (lbl_die, origin);
23871 else
23872 add_name_and_src_coords_attributes (lbl_die, decl);
23873 }
23874
23875 if (DECL_ABSTRACT_P (decl))
23876 equate_decl_number_to_die (decl, lbl_die);
23877 else if (! early_dwarf)
23878 {
23879 insn = DECL_RTL_IF_SET (decl);
23880
23881 /* Deleted labels are programmer-specified labels which have been
23882 eliminated because of various optimizations. We still emit them
23883 here so that it is possible to put breakpoints on them. */
23884 if (insn
23885 && (LABEL_P (insn)
23886 || ((NOTE_P (insn)
23887 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23888 {
23889 /* When optimization is enabled (via -O) some parts of the compiler
23890 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23891 represent source-level labels which were explicitly declared by
23892 the user. This really shouldn't be happening though, so catch
23893 it if it ever does happen. */
23894 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23895
23896 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23897 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23898 }
23899 else if (insn
23900 && NOTE_P (insn)
23901 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23902 && CODE_LABEL_NUMBER (insn) != -1)
23903 {
23904 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23905 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23906 }
23907 }
23908 }
23909
23910 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23911 attributes to the DIE for a block STMT, to describe where the inlined
23912 function was called from. This is similar to add_src_coords_attributes. */
23913
23914 static inline void
23915 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23916 {
23917 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23918
23919 if (dwarf_version >= 3 || !dwarf_strict)
23920 {
23921 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23922 add_AT_unsigned (die, DW_AT_call_line, s.line);
23923 if (debug_column_info && s.column)
23924 add_AT_unsigned (die, DW_AT_call_column, s.column);
23925 }
23926 }
23927
23928
23929 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23930 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23931
23932 static inline void
23933 add_high_low_attributes (tree stmt, dw_die_ref die)
23934 {
23935 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23936
23937 if (inline_entry_data **iedp
23938 = !inline_entry_data_table ? NULL
23939 : inline_entry_data_table->find_slot_with_hash (stmt,
23940 htab_hash_pointer (stmt),
23941 NO_INSERT))
23942 {
23943 inline_entry_data *ied = *iedp;
23944 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23945 gcc_assert (debug_inline_points);
23946 gcc_assert (inlined_function_outer_scope_p (stmt));
23947
23948 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23949 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23950
23951 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23952 && !dwarf_strict)
23953 {
23954 if (!output_asm_line_debug_info ())
23955 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23956 else
23957 {
23958 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23959 /* FIXME: this will resolve to a small number. Could we
23960 possibly emit smaller data? Ideally we'd emit a
23961 uleb128, but that would make the size of DIEs
23962 impossible for the compiler to compute, since it's
23963 the assembler that computes the value of the view
23964 label in this case. Ideally, we'd have a single form
23965 encompassing both the address and the view, and
23966 indirecting them through a table might make things
23967 easier, but even that would be more wasteful,
23968 space-wise, than what we have now. */
23969 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23970 }
23971 }
23972
23973 inline_entry_data_table->clear_slot (iedp);
23974 }
23975
23976 if (BLOCK_FRAGMENT_CHAIN (stmt)
23977 && (dwarf_version >= 3 || !dwarf_strict))
23978 {
23979 tree chain, superblock = NULL_TREE;
23980 dw_die_ref pdie;
23981 dw_attr_node *attr = NULL;
23982
23983 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23984 {
23985 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23986 BLOCK_NUMBER (stmt));
23987 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23988 }
23989
23990 /* Optimize duplicate .debug_ranges lists or even tails of
23991 lists. If this BLOCK has same ranges as its supercontext,
23992 lookup DW_AT_ranges attribute in the supercontext (and
23993 recursively so), verify that the ranges_table contains the
23994 right values and use it instead of adding a new .debug_range. */
23995 for (chain = stmt, pdie = die;
23996 BLOCK_SAME_RANGE (chain);
23997 chain = BLOCK_SUPERCONTEXT (chain))
23998 {
23999 dw_attr_node *new_attr;
24000
24001 pdie = pdie->die_parent;
24002 if (pdie == NULL)
24003 break;
24004 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24005 break;
24006 new_attr = get_AT (pdie, DW_AT_ranges);
24007 if (new_attr == NULL
24008 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24009 break;
24010 attr = new_attr;
24011 superblock = BLOCK_SUPERCONTEXT (chain);
24012 }
24013 if (attr != NULL
24014 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24015 == BLOCK_NUMBER (superblock))
24016 && BLOCK_FRAGMENT_CHAIN (superblock))
24017 {
24018 unsigned long off = attr->dw_attr_val.v.val_offset;
24019 unsigned long supercnt = 0, thiscnt = 0;
24020 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24021 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24022 {
24023 ++supercnt;
24024 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24025 == BLOCK_NUMBER (chain));
24026 }
24027 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24028 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24029 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24030 ++thiscnt;
24031 gcc_assert (supercnt >= thiscnt);
24032 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24033 false);
24034 note_rnglist_head (off + supercnt - thiscnt);
24035 return;
24036 }
24037
24038 unsigned int offset = add_ranges (stmt, true);
24039 add_AT_range_list (die, DW_AT_ranges, offset, false);
24040 note_rnglist_head (offset);
24041
24042 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24043 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24044 do
24045 {
24046 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24047 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24048 chain = BLOCK_FRAGMENT_CHAIN (chain);
24049 }
24050 while (chain);
24051 add_ranges (NULL);
24052 }
24053 else
24054 {
24055 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24056 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24057 BLOCK_NUMBER (stmt));
24058 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24059 BLOCK_NUMBER (stmt));
24060 add_AT_low_high_pc (die, label, label_high, false);
24061 }
24062 }
24063
24064 /* Generate a DIE for a lexical block. */
24065
24066 static void
24067 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24068 {
24069 dw_die_ref old_die = BLOCK_DIE (stmt);
24070 dw_die_ref stmt_die = NULL;
24071 if (!old_die)
24072 {
24073 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24074 BLOCK_DIE (stmt) = stmt_die;
24075 }
24076
24077 if (BLOCK_ABSTRACT (stmt))
24078 {
24079 if (old_die)
24080 {
24081 /* This must have been generated early and it won't even
24082 need location information since it's a DW_AT_inline
24083 function. */
24084 if (flag_checking)
24085 for (dw_die_ref c = context_die; c; c = c->die_parent)
24086 if (c->die_tag == DW_TAG_inlined_subroutine
24087 || c->die_tag == DW_TAG_subprogram)
24088 {
24089 gcc_assert (get_AT (c, DW_AT_inline));
24090 break;
24091 }
24092 return;
24093 }
24094 }
24095 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24096 {
24097 /* If this is an inlined instance, create a new lexical die for
24098 anything below to attach DW_AT_abstract_origin to. */
24099 if (old_die)
24100 {
24101 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24102 BLOCK_DIE (stmt) = stmt_die;
24103 old_die = NULL;
24104 }
24105
24106 tree origin = block_ultimate_origin (stmt);
24107 if (origin != NULL_TREE && origin != stmt)
24108 add_abstract_origin_attribute (stmt_die, origin);
24109 }
24110
24111 if (old_die)
24112 stmt_die = old_die;
24113
24114 /* A non-abstract block whose blocks have already been reordered
24115 should have the instruction range for this block. If so, set the
24116 high/low attributes. */
24117 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24118 {
24119 gcc_assert (stmt_die);
24120 add_high_low_attributes (stmt, stmt_die);
24121 }
24122
24123 decls_for_scope (stmt, stmt_die);
24124 }
24125
24126 /* Generate a DIE for an inlined subprogram. */
24127
24128 static void
24129 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24130 {
24131 tree decl;
24132
24133 /* The instance of the function that is effectively being inlined shall not
24134 be abstract. */
24135 gcc_assert (! BLOCK_ABSTRACT (stmt));
24136
24137 decl = block_ultimate_origin (stmt);
24138
24139 /* Make sure any inlined functions are known to be inlineable. */
24140 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24141 || cgraph_function_possibly_inlined_p (decl));
24142
24143 if (! BLOCK_ABSTRACT (stmt))
24144 {
24145 dw_die_ref subr_die
24146 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24147
24148 if (call_arg_locations || debug_inline_points)
24149 BLOCK_DIE (stmt) = subr_die;
24150 add_abstract_origin_attribute (subr_die, decl);
24151 if (TREE_ASM_WRITTEN (stmt))
24152 add_high_low_attributes (stmt, subr_die);
24153 add_call_src_coords_attributes (stmt, subr_die);
24154
24155 decls_for_scope (stmt, subr_die);
24156 }
24157 }
24158
24159 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24160 the comment for VLR_CONTEXT. */
24161
24162 static void
24163 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24164 {
24165 dw_die_ref decl_die;
24166
24167 if (TREE_TYPE (decl) == error_mark_node)
24168 return;
24169
24170 decl_die = new_die (DW_TAG_member, context_die, decl);
24171 add_name_and_src_coords_attributes (decl_die, decl);
24172 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24173 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24174 context_die);
24175
24176 if (DECL_BIT_FIELD_TYPE (decl))
24177 {
24178 add_byte_size_attribute (decl_die, decl);
24179 add_bit_size_attribute (decl_die, decl);
24180 add_bit_offset_attribute (decl_die, decl, ctx);
24181 }
24182
24183 add_alignment_attribute (decl_die, decl);
24184
24185 /* If we have a variant part offset, then we are supposed to process a member
24186 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24187 trees. */
24188 gcc_assert (ctx->variant_part_offset == NULL_TREE
24189 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24190 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24191 add_data_member_location_attribute (decl_die, decl, ctx);
24192
24193 if (DECL_ARTIFICIAL (decl))
24194 add_AT_flag (decl_die, DW_AT_artificial, 1);
24195
24196 add_accessibility_attribute (decl_die, decl);
24197
24198 /* Equate decl number to die, so that we can look up this decl later on. */
24199 equate_decl_number_to_die (decl, decl_die);
24200 }
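
/* As a small illustration of the bit-field handling above (layout details
   are of course target dependent), a member such as

       struct s { unsigned int flags : 3; };

   has DECL_BIT_FIELD_TYPE set, so its DW_TAG_member DIE carries
   DW_AT_byte_size, DW_AT_bit_size (3 here) and a bit offset attribute in
   addition to the usual name, type and data member location.  */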
24201
24202 /* Generate a DIE for a pointer to a member type. TYPE can be an
24203 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24204 pointer to member function. */
24205
24206 static void
24207 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24208 {
24209 if (lookup_type_die (type))
24210 return;
24211
24212 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24213 scope_die_for (type, context_die), type);
24214
24215 equate_type_number_to_die (type, ptr_die);
24216 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24217 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24218 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24219 context_die);
24220 add_alignment_attribute (ptr_die, type);
24221
24222 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24223 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24224 {
24225 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24226 add_AT_loc (ptr_die, DW_AT_use_location, op);
24227 }
24228 }
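
/* For instance, given the C++ class "struct S { int i; };", the type of

       int S::*pdm = &S::i;

   is an OFFSET_TYPE whose TYPE_OFFSET_BASETYPE is S and whose TREE_TYPE is
   int, so the DIE built above gets DW_AT_containing_type pointing at S's
   DIE and DW_AT_type pointing at int (the exact tree shape is whatever the
   C++ front end builds; this is only a sketch).  */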
24229
24230 static char *producer_string;
24231
24232 /* Return a heap allocated producer string including command line options
24233 if -grecord-gcc-switches. */
24234
24235 static char *
24236 gen_producer_string (void)
24237 {
24238 size_t j;
24239 auto_vec<const char *> switches;
24240 const char *language_string = lang_hooks.name;
24241 char *producer, *tail;
24242 const char *p;
24243 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24244 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24245
24246 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24247 switch (save_decoded_options[j].opt_index)
24248 {
24249 case OPT_o:
24250 case OPT_d:
24251 case OPT_dumpbase:
24252 case OPT_dumpdir:
24253 case OPT_auxbase:
24254 case OPT_auxbase_strip:
24255 case OPT_quiet:
24256 case OPT_version:
24257 case OPT_v:
24258 case OPT_w:
24259 case OPT_L:
24260 case OPT_D:
24261 case OPT_I:
24262 case OPT_U:
24263 case OPT_SPECIAL_unknown:
24264 case OPT_SPECIAL_ignore:
24265 case OPT_SPECIAL_deprecated:
24266 case OPT_SPECIAL_program_name:
24267 case OPT_SPECIAL_input_file:
24268 case OPT_grecord_gcc_switches:
24269 case OPT__output_pch_:
24270 case OPT_fdiagnostics_show_location_:
24271 case OPT_fdiagnostics_show_option:
24272 case OPT_fdiagnostics_show_caret:
24273 case OPT_fdiagnostics_show_labels:
24274 case OPT_fdiagnostics_show_line_numbers:
24275 case OPT_fdiagnostics_color_:
24276 case OPT_fverbose_asm:
24277 case OPT____:
24278 case OPT__sysroot_:
24279 case OPT_nostdinc:
24280 case OPT_nostdinc__:
24281 case OPT_fpreprocessed:
24282 case OPT_fltrans_output_list_:
24283 case OPT_fresolution_:
24284 case OPT_fdebug_prefix_map_:
24285 case OPT_fmacro_prefix_map_:
24286 case OPT_ffile_prefix_map_:
24287 case OPT_fcompare_debug:
24288 case OPT_fchecking:
24289 case OPT_fchecking_:
24290 /* Ignore these. */
24291 continue;
24292 default:
24293 if (cl_options[save_decoded_options[j].opt_index].flags
24294 & CL_NO_DWARF_RECORD)
24295 continue;
24296 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24297 == '-');
24298 switch (save_decoded_options[j].canonical_option[0][1])
24299 {
24300 case 'M':
24301 case 'i':
24302 case 'W':
24303 continue;
24304 case 'f':
24305 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24306 "dump", 4) == 0)
24307 continue;
24308 break;
24309 default:
24310 break;
24311 }
24312 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24313 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24314 break;
24315 }
24316
24317 producer = XNEWVEC (char, plen + 1 + len + 1);
24318 tail = producer;
24319 sprintf (tail, "%s %s", language_string, version_string);
24320 tail += plen;
24321
24322 FOR_EACH_VEC_ELT (switches, j, p)
24323 {
24324 len = strlen (p);
24325 *tail = ' ';
24326 memcpy (tail + 1, p, len);
24327 tail += len + 1;
24328 }
24329
24330 *tail = '\0';
24331 return producer;
24332 }
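
/* As a rough example (option spellings and the version number are only
   illustrative), compiling with "-O2 -g -grecord-gcc-switches -mtune=generic"
   could yield a producer string along the lines of

       "GNU C17 8.2.0 -O2 -g -mtune=generic"

   since driver and diagnostic options (-o, -quiet, -fdiagnostics-*, and
   -grecord-gcc-switches itself, among others) are filtered out by the
   switch statement above.  */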
24333
24334 /* Given a C and/or C++ language/version string, return the "highest".
24335 C++ is assumed to be "higher" than C in this case. Used for merging
24336 LTO translation unit languages. */
24337 static const char *
24338 highest_c_language (const char *lang1, const char *lang2)
24339 {
24340 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24341 return "GNU C++17";
24342 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24343 return "GNU C++14";
24344 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24345 return "GNU C++11";
24346 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24347 return "GNU C++98";
24348
24349 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24350 return "GNU C17";
24351 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24352 return "GNU C11";
24353 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24354 return "GNU C99";
24355 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24356 return "GNU C89";
24357
24358 gcc_unreachable ();
24359 }
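
/* For example, merging a translation unit compiled as "GNU C11" with one
   compiled as "GNU C++14" (see the LTO handling in gen_compile_unit_die
   below) yields "GNU C++14", since the C++ strings are tried first.  */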
24360
24361
24362 /* Generate the DIE for the compilation unit. */
24363
24364 static dw_die_ref
24365 gen_compile_unit_die (const char *filename)
24366 {
24367 dw_die_ref die;
24368 const char *language_string = lang_hooks.name;
24369 int language;
24370
24371 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24372
24373 if (filename)
24374 {
24375 add_name_attribute (die, filename);
24376 /* Don't add cwd for <built-in>. */
24377 if (filename[0] != '<')
24378 add_comp_dir_attribute (die);
24379 }
24380
24381 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24382
24383 /* If our producer is LTO, try to figure out a common language to use
24384 from the global list of translation units. */
24385 if (strcmp (language_string, "GNU GIMPLE") == 0)
24386 {
24387 unsigned i;
24388 tree t;
24389 const char *common_lang = NULL;
24390
24391 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24392 {
24393 if (!TRANSLATION_UNIT_LANGUAGE (t))
24394 continue;
24395 if (!common_lang)
24396 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24397 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24398 ;
24399 else if (strncmp (common_lang, "GNU C", 5) == 0
24400 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24401 /* Mixing C and C++ is ok, use C++ in that case. */
24402 common_lang = highest_c_language (common_lang,
24403 TRANSLATION_UNIT_LANGUAGE (t));
24404 else
24405 {
24406 /* Fall back to C. */
24407 common_lang = NULL;
24408 break;
24409 }
24410 }
24411
24412 if (common_lang)
24413 language_string = common_lang;
24414 }
24415
24416 language = DW_LANG_C;
24417 if (strncmp (language_string, "GNU C", 5) == 0
24418 && ISDIGIT (language_string[5]))
24419 {
24420 language = DW_LANG_C89;
24421 if (dwarf_version >= 3 || !dwarf_strict)
24422 {
24423 if (strcmp (language_string, "GNU C89") != 0)
24424 language = DW_LANG_C99;
24425
24426 if (dwarf_version >= 5 /* || !dwarf_strict */)
24427 if (strcmp (language_string, "GNU C11") == 0
24428 || strcmp (language_string, "GNU C17") == 0)
24429 language = DW_LANG_C11;
24430 }
24431 }
24432 else if (strncmp (language_string, "GNU C++", 7) == 0)
24433 {
24434 language = DW_LANG_C_plus_plus;
24435 if (dwarf_version >= 5 /* || !dwarf_strict */)
24436 {
24437 if (strcmp (language_string, "GNU C++11") == 0)
24438 language = DW_LANG_C_plus_plus_11;
24439 else if (strcmp (language_string, "GNU C++14") == 0)
24440 language = DW_LANG_C_plus_plus_14;
24441 else if (strcmp (language_string, "GNU C++17") == 0)
24442 /* For now. */
24443 language = DW_LANG_C_plus_plus_14;
24444 }
24445 }
24446 else if (strcmp (language_string, "GNU F77") == 0)
24447 language = DW_LANG_Fortran77;
24448 else if (dwarf_version >= 3 || !dwarf_strict)
24449 {
24450 if (strcmp (language_string, "GNU Ada") == 0)
24451 language = DW_LANG_Ada95;
24452 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24453 {
24454 language = DW_LANG_Fortran95;
24455 if (dwarf_version >= 5 /* || !dwarf_strict */)
24456 {
24457 if (strcmp (language_string, "GNU Fortran2003") == 0)
24458 language = DW_LANG_Fortran03;
24459 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24460 language = DW_LANG_Fortran08;
24461 }
24462 }
24463 else if (strcmp (language_string, "GNU Objective-C") == 0)
24464 language = DW_LANG_ObjC;
24465 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24466 language = DW_LANG_ObjC_plus_plus;
24467 else if (dwarf_version >= 5 || !dwarf_strict)
24468 {
24469 if (strcmp (language_string, "GNU Go") == 0)
24470 language = DW_LANG_Go;
24471 }
24472 }
24473 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24474 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24475 language = DW_LANG_Fortran90;
24476 /* Likewise for Ada. */
24477 else if (strcmp (language_string, "GNU Ada") == 0)
24478 language = DW_LANG_Ada83;
24479
24480 add_AT_unsigned (die, DW_AT_language, language);
24481
24482 switch (language)
24483 {
24484 case DW_LANG_Fortran77:
24485 case DW_LANG_Fortran90:
24486 case DW_LANG_Fortran95:
24487 case DW_LANG_Fortran03:
24488 case DW_LANG_Fortran08:
24489 /* Fortran has case insensitive identifiers and the front-end
24490 lowercases everything. */
24491 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24492 break;
24493 default:
24494 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24495 break;
24496 }
24497 return die;
24498 }
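
/* For instance, a translation unit reported by the front end as
   "GNU C++14" is emitted with DW_AT_language DW_LANG_C_plus_plus under
   -gdwarf-4 and DW_LANG_C_plus_plus_14 under -gdwarf-5, per the version
   checks above.  */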
24499
24500 /* Generate the DIE for a base class. */
24501
24502 static void
24503 gen_inheritance_die (tree binfo, tree access, tree type,
24504 dw_die_ref context_die)
24505 {
24506 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24507 struct vlr_context ctx = { type, NULL };
24508
24509 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24510 context_die);
24511 add_data_member_location_attribute (die, binfo, &ctx);
24512
24513 if (BINFO_VIRTUAL_P (binfo))
24514 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24515
24516 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24517 children, otherwise the default is DW_ACCESS_public. In DWARF2
24518 the default has always been DW_ACCESS_private. */
24519 if (access == access_public_node)
24520 {
24521 if (dwarf_version == 2
24522 || context_die->die_tag == DW_TAG_class_type)
24523 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24524 }
24525 else if (access == access_protected_node)
24526 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24527 else if (dwarf_version > 2
24528 && context_die->die_tag != DW_TAG_class_type)
24529 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24530 }
24531
24532 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24533 structure. */
24534 static bool
24535 is_variant_part (tree decl)
24536 {
24537 return (TREE_CODE (decl) == FIELD_DECL
24538 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24539 }
24540
24541 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24542 return the FIELD_DECL. Return NULL_TREE otherwise. */
24543
24544 static tree
24545 analyze_discr_in_predicate (tree operand, tree struct_type)
24546 {
24547 bool continue_stripping = true;
24548 while (continue_stripping)
24549 switch (TREE_CODE (operand))
24550 {
24551 CASE_CONVERT:
24552 operand = TREE_OPERAND (operand, 0);
24553 break;
24554 default:
24555 continue_stripping = false;
24556 break;
24557 }
24558
24559 /* Match field access to members of struct_type only. */
24560 if (TREE_CODE (operand) == COMPONENT_REF
24561 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24562 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24563 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24564 return TREE_OPERAND (operand, 1);
24565 else
24566 return NULL_TREE;
24567 }
24568
24569 /* Check that SRC is a constant integer that can be represented as a native
24570 integer constant (either signed or unsigned). If so, store it into DEST and
24571 return true. Return false otherwise. */
24572
24573 static bool
24574 get_discr_value (tree src, dw_discr_value *dest)
24575 {
24576 tree discr_type = TREE_TYPE (src);
24577
24578 if (lang_hooks.types.get_debug_type)
24579 {
24580 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24581 if (debug_type != NULL)
24582 discr_type = debug_type;
24583 }
24584
24585 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24586 return false;
24587
24588 /* Signedness can vary between the original type and the debug type. This
24589 can happen for character types in Ada for instance: the character type
24590 used for code generation can be signed, to be compatible with the C one,
24591 but from a debugger point of view, it must be unsigned. */
24592 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24593 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24594
24595 if (is_orig_unsigned != is_debug_unsigned)
24596 src = fold_convert (discr_type, src);
24597
24598 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24599 return false;
24600
24601 dest->pos = is_debug_unsigned;
24602 if (is_debug_unsigned)
24603 dest->v.uval = tree_to_uhwi (src);
24604 else
24605 dest->v.sval = tree_to_shwi (src);
24606
24607 return true;
24608 }
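
/* For instance (assuming an Ada-like setup where the codegen type is a
   signed 8-bit character but the debug type is unsigned), a discriminant
   value of -56 in SRC is first folded to the debug type, so DEST records
   an unsigned value of 200, which is what a debugger will see.  */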
24609
24610 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24611 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24612 store NULL_TREE in DISCR_DECL. Otherwise:
24613
24614 - store the discriminant field in STRUCT_TYPE that controls the variant
24615 part to *DISCR_DECL
24616
24617 - put in *DISCR_LISTS_P an array where for each variant, the item
24618 represents the corresponding matching list of discriminant values.
24619
24620 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24621 the above array.
24622
24623 Note that when the array is allocated (i.e. when the analysis is
24624 successful), it is up to the caller to free the array. */
24625
24626 static void
24627 analyze_variants_discr (tree variant_part_decl,
24628 tree struct_type,
24629 tree *discr_decl,
24630 dw_discr_list_ref **discr_lists_p,
24631 unsigned *discr_lists_length)
24632 {
24633 tree variant_part_type = TREE_TYPE (variant_part_decl);
24634 tree variant;
24635 dw_discr_list_ref *discr_lists;
24636 unsigned i;
24637
24638 /* Compute how many variants there are in this variant part. */
24639 *discr_lists_length = 0;
24640 for (variant = TYPE_FIELDS (variant_part_type);
24641 variant != NULL_TREE;
24642 variant = DECL_CHAIN (variant))
24643 ++*discr_lists_length;
24644
24645 *discr_decl = NULL_TREE;
24646 *discr_lists_p
24647 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24648 sizeof (**discr_lists_p));
24649 discr_lists = *discr_lists_p;
24650
24651 /* And then analyze all variants to extract discriminant information for all
24652 of them. This analysis is conservative: as soon as we detect something we
24653 do not support, abort everything and pretend we found nothing. */
24654 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24655 variant != NULL_TREE;
24656 variant = DECL_CHAIN (variant), ++i)
24657 {
24658 tree match_expr = DECL_QUALIFIER (variant);
24659
24660 /* Now, try to analyze the predicate and deduce a discriminant for
24661 it. */
24662 if (match_expr == boolean_true_node)
24663 /* Typically happens for the default variant: it matches all cases that
24664 previous variants rejected. Don't output any matching value for
24665 this one. */
24666 continue;
24667
24668 /* The following loop tries to iterate over each discriminant
24669 possibility: single values or ranges. */
24670 while (match_expr != NULL_TREE)
24671 {
24672 tree next_round_match_expr;
24673 tree candidate_discr = NULL_TREE;
24674 dw_discr_list_ref new_node = NULL;
24675
24676 /* Possibilities are matched one after the other by nested
24677 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24678 continue with the rest at next iteration. */
24679 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24680 {
24681 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24682 match_expr = TREE_OPERAND (match_expr, 1);
24683 }
24684 else
24685 next_round_match_expr = NULL_TREE;
24686
24687 if (match_expr == boolean_false_node)
24688 /* This sub-expression matches nothing: just wait for the next
24689 one. */
24690 ;
24691
24692 else if (TREE_CODE (match_expr) == EQ_EXPR)
24693 {
24694 /* We are matching: <discr_field> == <integer_cst>
24695 This sub-expression matches a single value. */
24696 tree integer_cst = TREE_OPERAND (match_expr, 1);
24697
24698 candidate_discr
24699 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24700 struct_type);
24701
24702 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24703 if (!get_discr_value (integer_cst,
24704 &new_node->dw_discr_lower_bound))
24705 goto abort;
24706 new_node->dw_discr_range = false;
24707 }
24708
24709 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24710 {
24711 /* We are matching:
24712 <discr_field> > <integer_cst>
24713 && <discr_field> < <integer_cst>.
24714 This sub-expression matches the range of values between the
24715 two matched integer constants. Note that comparisons can be
24716 inclusive or exclusive. */
24717 tree candidate_discr_1, candidate_discr_2;
24718 tree lower_cst, upper_cst;
24719 bool lower_cst_included, upper_cst_included;
24720 tree lower_op = TREE_OPERAND (match_expr, 0);
24721 tree upper_op = TREE_OPERAND (match_expr, 1);
24722
24723 /* When the comparison is exclusive, the integer constant is not
24724 the discriminant range bound we are looking for: we will have
24725 to increment or decrement it. */
24726 if (TREE_CODE (lower_op) == GE_EXPR)
24727 lower_cst_included = true;
24728 else if (TREE_CODE (lower_op) == GT_EXPR)
24729 lower_cst_included = false;
24730 else
24731 goto abort;
24732
24733 if (TREE_CODE (upper_op) == LE_EXPR)
24734 upper_cst_included = true;
24735 else if (TREE_CODE (upper_op) == LT_EXPR)
24736 upper_cst_included = false;
24737 else
24738 goto abort;
24739
24740 /* Extract the discriminant from the first operand and check it
24741 is consistant with the same analysis in the second
24742 operand. */
24743 candidate_discr_1
24744 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24745 struct_type);
24746 candidate_discr_2
24747 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24748 struct_type);
24749 if (candidate_discr_1 == candidate_discr_2)
24750 candidate_discr = candidate_discr_1;
24751 else
24752 goto abort;
24753
24754 /* Extract bounds from both. */
24755 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24756 lower_cst = TREE_OPERAND (lower_op, 1);
24757 upper_cst = TREE_OPERAND (upper_op, 1);
24758
24759 if (!lower_cst_included)
24760 lower_cst
24761 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24762 build_int_cst (TREE_TYPE (lower_cst), 1));
24763 if (!upper_cst_included)
24764 upper_cst
24765 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24766 build_int_cst (TREE_TYPE (upper_cst), 1));
24767
24768 if (!get_discr_value (lower_cst,
24769 &new_node->dw_discr_lower_bound)
24770 || !get_discr_value (upper_cst,
24771 &new_node->dw_discr_upper_bound))
24772 goto abort;
24773
24774 new_node->dw_discr_range = true;
24775 }
24776
24777 else
24778 /* Unsupported sub-expression: we cannot determine the set of
24779 matching discriminant values. Abort everything. */
24780 goto abort;
24781
24782 /* If the discriminant info is not consistent with what we saw so
24783 far, consider the analysis failed and abort everything. */
24784 if (candidate_discr == NULL_TREE
24785 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24786 goto abort;
24787 else
24788 *discr_decl = candidate_discr;
24789
24790 if (new_node != NULL)
24791 {
24792 new_node->dw_discr_next = discr_lists[i];
24793 discr_lists[i] = new_node;
24794 }
24795 match_expr = next_round_match_expr;
24796 }
24797 }
24798
24799 /* If we reach this point, we could match everything we were interested
24800 in. */
24801 return;
24802
24803 abort:
24804 /* Clean up all data structures and return no result. */
24805 free (*discr_lists_p);
24806 *discr_lists_p = NULL;
24807 *discr_decl = NULL_TREE;
24808 }
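
/* As a sketch of what the analysis above accepts, an Ada-style variant
   part whose discriminant D selects variants roughly like

       when 1      =>  ...   DECL_QUALIFIER:  <rec>.D == 1
       when 2 .. 5 =>  ...   DECL_QUALIFIER:  <rec>.D >= 2 && <rec>.D <= 5
       when others =>  ...   DECL_QUALIFIER:  boolean_true_node

   yields one single-value node for the first variant, one range node with
   bounds 2 and 5 for the second, and no list at all for the default
   variant (the exact predicate trees are up to the front end; this is
   only illustrative).  */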
24809
24810 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24811 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24812 under CONTEXT_DIE.
24813
24814 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24815 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24816 this type, which are record types, represent the available variants and each
24817 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24818 values are inferred from these attributes.
24819
24820 In trees, the offsets for the fields inside these sub-records are relative
24821 to the variant part itself, whereas the corresponding DIEs should have
24822 offset attributes that are relative to the embedding record base address.
24823 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24824 must be an expression that computes the offset of the variant part to
24825 describe in DWARF. */
24826
24827 static void
24828 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24829 dw_die_ref context_die)
24830 {
24831 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24832 tree variant_part_offset = vlr_ctx->variant_part_offset;
24833 struct loc_descr_context ctx = {
24834 vlr_ctx->struct_type, /* context_type */
24835 NULL_TREE, /* base_decl */
24836 NULL, /* dpi */
24837 false, /* placeholder_arg */
24838 false /* placeholder_seen */
24839 };
24840
24841 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24842 NULL_TREE if there is no such field. */
24843 tree discr_decl = NULL_TREE;
24844 dw_discr_list_ref *discr_lists;
24845 unsigned discr_lists_length = 0;
24846 unsigned i;
24847
24848 dw_die_ref dwarf_proc_die = NULL;
24849 dw_die_ref variant_part_die
24850 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24851
24852 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24853
24854 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24855 &discr_decl, &discr_lists, &discr_lists_length);
24856
24857 if (discr_decl != NULL_TREE)
24858 {
24859 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24860
24861 if (discr_die)
24862 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24863 else
24864 /* We have no DIE for the discriminant, so just discard all
24865 discriminant information in the output. */
24866 discr_decl = NULL_TREE;
24867 }
24868
24869 /* If the offset for this variant part is more complex than a constant,
24870 create a DWARF procedure for it so that we will not have to generate DWARF
24871 expressions for it for each member. */
24872 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24873 && (dwarf_version >= 3 || !dwarf_strict))
24874 {
24875 const tree dwarf_proc_fndecl
24876 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24877 build_function_type (TREE_TYPE (variant_part_offset),
24878 NULL_TREE));
24879 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24880 const dw_loc_descr_ref dwarf_proc_body
24881 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24882
24883 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24884 dwarf_proc_fndecl, context_die);
24885 if (dwarf_proc_die != NULL)
24886 variant_part_offset = dwarf_proc_call;
24887 }
24888
24889 /* Output DIEs for all variants. */
24890 i = 0;
24891 for (tree variant = TYPE_FIELDS (variant_part_type);
24892 variant != NULL_TREE;
24893 variant = DECL_CHAIN (variant), ++i)
24894 {
24895 tree variant_type = TREE_TYPE (variant);
24896 dw_die_ref variant_die;
24897
24898 /* All variants (i.e. members of a variant part) are supposed to be
24899 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24900 under these records. */
24901 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24902
24903 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24904 equate_decl_number_to_die (variant, variant_die);
24905
24906 /* Output discriminant values this variant matches, if any. */
24907 if (discr_decl == NULL || discr_lists[i] == NULL)
24908 /* In case we have no discriminant information at all, this is
24909 probably the default variant: as the standard says, don't
24910 output any discriminant value/list attribute. */
24911 ;
24912 else if (discr_lists[i]->dw_discr_next == NULL
24913 && !discr_lists[i]->dw_discr_range)
24914 /* If there is only one accepted value, don't bother outputting a
24915 list. */
24916 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24917 else
24918 add_discr_list (variant_die, discr_lists[i]);
24919
24920 for (tree member = TYPE_FIELDS (variant_type);
24921 member != NULL_TREE;
24922 member = DECL_CHAIN (member))
24923 {
24924 struct vlr_context vlr_sub_ctx = {
24925 vlr_ctx->struct_type, /* struct_type */
24926 NULL /* variant_part_offset */
24927 };
24928 if (is_variant_part (member))
24929 {
24930 /* All offsets for fields inside variant parts are relative to
24931 the top-level embedding RECORD_TYPE's base address. On the
24932 other hand, offsets in GCC's types are relative to the
24933 nested-most variant part. So we have to sum offsets each time
24934 we recurse. */
24935
24936 vlr_sub_ctx.variant_part_offset
24937 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24938 variant_part_offset, byte_position (member));
24939 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24940 }
24941 else
24942 {
24943 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24944 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24945 }
24946 }
24947 }
24948
24949 free (discr_lists);
24950 }
24951
24952 /* Generate a DIE for a class member. */
24953
24954 static void
24955 gen_member_die (tree type, dw_die_ref context_die)
24956 {
24957 tree member;
24958 tree binfo = TYPE_BINFO (type);
24959
24960 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24961
24962 /* If this is not an incomplete type, output descriptions of each of its
24963 members. Note that as we output the DIEs necessary to represent the
24964 members of this record or union type, we will also be trying to output
24965 DIEs to represent the *types* of those members. However the `type'
24966 function (above) will specifically avoid generating type DIEs for member
24967 types *within* the list of member DIEs for this (containing) type except
24968 for those types (of members) which are explicitly marked as also being
24969 members of this (containing) type themselves. The g++ front end can
24970 force any given type to be treated as a member of some other (containing)
24971 type by setting the TYPE_CONTEXT of the given (member) type to point to
24972 the TREE node representing the appropriate (containing) type. */
24973
24974 /* First output info about the base classes. */
24975 if (binfo)
24976 {
24977 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24978 int i;
24979 tree base;
24980
24981 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24982 gen_inheritance_die (base,
24983 (accesses ? (*accesses)[i] : access_public_node),
24984 type,
24985 context_die);
24986 }
24987
24988 /* Now output info about the data members and type members. */
24989 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24990 {
24991 struct vlr_context vlr_ctx = { type, NULL_TREE };
24992 bool static_inline_p
24993 = (TREE_STATIC (member)
24994 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24995 != -1));
24996
24997 /* Ignore clones. */
24998 if (DECL_ABSTRACT_ORIGIN (member))
24999 continue;
25000
25001 /* If we thought we were generating minimal debug info for TYPE
25002 and then changed our minds, some of the member declarations
25003 may have already been defined. Don't define them again, but
25004 do put them in the right order. */
25005
25006 if (dw_die_ref child = lookup_decl_die (member))
25007 {
25008 /* Handle inline static data members, which only have in-class
25009 declarations. */
25010 dw_die_ref ref = NULL;
25011 if (child->die_tag == DW_TAG_variable
25012 && child->die_parent == comp_unit_die ())
25013 {
25014 ref = get_AT_ref (child, DW_AT_specification);
25015 /* For C++17 inline static data members followed by redundant
25016 out of class redeclaration, we might get here with
25017 child being the DIE created for the out of class
25018 redeclaration and with its DW_AT_specification being
25019 the DIE created for in-class definition. We want to
25020 reparent the latter, and don't want to create another
25021 DIE with DW_AT_specification in that case, because
25022 we already have one. */
25023 if (ref
25024 && static_inline_p
25025 && ref->die_tag == DW_TAG_variable
25026 && ref->die_parent == comp_unit_die ()
25027 && get_AT (ref, DW_AT_specification) == NULL)
25028 {
25029 child = ref;
25030 ref = NULL;
25031 static_inline_p = false;
25032 }
25033 }
25034
25035 if (child->die_tag == DW_TAG_variable
25036 && child->die_parent == comp_unit_die ()
25037 && ref == NULL)
25038 {
25039 reparent_child (child, context_die);
25040 if (dwarf_version < 5)
25041 child->die_tag = DW_TAG_member;
25042 }
25043 else
25044 splice_child_die (context_die, child);
25045 }
25046
25047 /* Do not generate standard DWARF for variant parts if we are generating
25048 the corresponding GNAT encodings: DIEs generated for both would
25049 conflict in our mappings. */
25050 else if (is_variant_part (member)
25051 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25052 {
25053 vlr_ctx.variant_part_offset = byte_position (member);
25054 gen_variant_part (member, &vlr_ctx, context_die);
25055 }
25056 else
25057 {
25058 vlr_ctx.variant_part_offset = NULL_TREE;
25059 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25060 }
25061
25062 /* For C++ inline static data members emit immediately a DW_TAG_variable
25063 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25064 DW_AT_specification. */
25065 if (static_inline_p)
25066 {
25067 int old_extern = DECL_EXTERNAL (member);
25068 DECL_EXTERNAL (member) = 0;
25069 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25070 DECL_EXTERNAL (member) = old_extern;
25071 }
25072 }
25073 }
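
/* A short C++ illustration of the static_inline_p handling above:

       struct S { static inline int counter = 0; };

   The in-class declaration is also the definition, so besides the member
   DIE under S we immediately emit a DW_TAG_variable DIE at the compile
   unit level referring back to it via DW_AT_specification; temporarily
   clearing DECL_EXTERNAL makes gen_decl_die treat it as a definition.  */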
25074
25075 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25076 is set, we pretend that the type was never defined, so we only get the
25077 member DIEs needed by later specification DIEs. */
25078
25079 static void
25080 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25081 enum debug_info_usage usage)
25082 {
25083 if (TREE_ASM_WRITTEN (type))
25084 {
25085 /* Fill in the bound of variable-length fields in late dwarf if
25086 still incomplete. */
25087 if (!early_dwarf && variably_modified_type_p (type, NULL))
25088 for (tree member = TYPE_FIELDS (type);
25089 member;
25090 member = DECL_CHAIN (member))
25091 fill_variable_array_bounds (TREE_TYPE (member));
25092 return;
25093 }
25094
25095 dw_die_ref type_die = lookup_type_die (type);
25096 dw_die_ref scope_die = 0;
25097 int nested = 0;
25098 int complete = (TYPE_SIZE (type)
25099 && (! TYPE_STUB_DECL (type)
25100 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25101 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25102 complete = complete && should_emit_struct_debug (type, usage);
25103
25104 if (type_die && ! complete)
25105 return;
25106
25107 if (TYPE_CONTEXT (type) != NULL_TREE
25108 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25109 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25110 nested = 1;
25111
25112 scope_die = scope_die_for (type, context_die);
25113
25114 /* Generate child DIEs for template parameters. */
25115 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25116 schedule_generic_params_dies_gen (type);
25117
25118 if (! type_die || (nested && is_cu_die (scope_die)))
25119 /* First occurrence of type or toplevel definition of nested class. */
25120 {
25121 dw_die_ref old_die = type_die;
25122
25123 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25124 ? record_type_tag (type) : DW_TAG_union_type,
25125 scope_die, type);
25126 equate_type_number_to_die (type, type_die);
25127 if (old_die)
25128 add_AT_specification (type_die, old_die);
25129 else
25130 add_name_attribute (type_die, type_tag (type));
25131 }
25132 else
25133 remove_AT (type_die, DW_AT_declaration);
25134
25135 /* If this type has been completed, then give it a byte_size attribute and
25136 then give a list of members. */
25137 if (complete && !ns_decl)
25138 {
25139 /* Prevent infinite recursion in cases where the type of some member of
25140 this type is expressed in terms of this type itself. */
25141 TREE_ASM_WRITTEN (type) = 1;
25142 add_byte_size_attribute (type_die, type);
25143 add_alignment_attribute (type_die, type);
25144 if (TYPE_STUB_DECL (type) != NULL_TREE)
25145 {
25146 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25147 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25148 }
25149
25150 /* If the first reference to this type was as the return type of an
25151 inline function, then it may not have a parent. Fix this now. */
25152 if (type_die->die_parent == NULL)
25153 add_child_die (scope_die, type_die);
25154
25155 gen_member_die (type, type_die);
25156
25157 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25158 if (TYPE_ARTIFICIAL (type))
25159 add_AT_flag (type_die, DW_AT_artificial, 1);
25160
25161 /* GNU extension: Record what type our vtable lives in. */
25162 if (TYPE_VFIELD (type))
25163 {
25164 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25165
25166 gen_type_die (vtype, context_die);
25167 add_AT_die_ref (type_die, DW_AT_containing_type,
25168 lookup_type_die (vtype));
25169 }
25170 }
25171 else
25172 {
25173 add_AT_flag (type_die, DW_AT_declaration, 1);
25174
25175 /* We don't need to do this for function-local types. */
25176 if (TYPE_STUB_DECL (type)
25177 && ! decl_function_context (TYPE_STUB_DECL (type)))
25178 vec_safe_push (incomplete_types, type);
25179 }
25180
25181 if (get_AT (type_die, DW_AT_name))
25182 add_pubtype (type, type_die);
25183 }
25184
25185 /* Generate a DIE for a subroutine _type_. */
25186
25187 static void
25188 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25189 {
25190 tree return_type = TREE_TYPE (type);
25191 dw_die_ref subr_die
25192 = new_die (DW_TAG_subroutine_type,
25193 scope_die_for (type, context_die), type);
25194
25195 equate_type_number_to_die (type, subr_die);
25196 add_prototyped_attribute (subr_die, type);
25197 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25198 context_die);
25199 add_alignment_attribute (subr_die, type);
25200 gen_formal_types_die (type, subr_die);
25201
25202 if (get_AT (subr_die, DW_AT_name))
25203 add_pubtype (type, subr_die);
25204 if ((dwarf_version >= 5 || !dwarf_strict)
25205 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25206 add_AT_flag (subr_die, DW_AT_reference, 1);
25207 if ((dwarf_version >= 5 || !dwarf_strict)
25208 && lang_hooks.types.type_dwarf_attribute (type,
25209 DW_AT_rvalue_reference) != -1)
25210 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25211 }
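
/* E.g. for the C type "int (*) (float, char *)", the pointed-to
   FUNCTION_TYPE gets a DW_TAG_subroutine_type DIE whose DW_AT_type is int
   and whose children are two DW_TAG_formal_parameter DIEs (float and
   char *), produced by gen_formal_types_die above.  */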
25212
25213 /* Generate a DIE for a type definition. */
25214
25215 static void
25216 gen_typedef_die (tree decl, dw_die_ref context_die)
25217 {
25218 dw_die_ref type_die;
25219 tree type;
25220
25221 if (TREE_ASM_WRITTEN (decl))
25222 {
25223 if (DECL_ORIGINAL_TYPE (decl))
25224 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25225 return;
25226 }
25227
25228 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25229 checks in process_scope_var and modified_type_die), this should be called
25230 only for original types. */
25231 gcc_assert (decl_ultimate_origin (decl) == NULL
25232 || decl_ultimate_origin (decl) == decl);
25233
25234 TREE_ASM_WRITTEN (decl) = 1;
25235 type_die = new_die (DW_TAG_typedef, context_die, decl);
25236
25237 add_name_and_src_coords_attributes (type_die, decl);
25238 if (DECL_ORIGINAL_TYPE (decl))
25239 {
25240 type = DECL_ORIGINAL_TYPE (decl);
25241 if (type == error_mark_node)
25242 return;
25243
25244 gcc_assert (type != TREE_TYPE (decl));
25245 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25246 }
25247 else
25248 {
25249 type = TREE_TYPE (decl);
25250 if (type == error_mark_node)
25251 return;
25252
25253 if (is_naming_typedef_decl (TYPE_NAME (type)))
25254 {
25255 /* Here, we are in the case of decl being a typedef naming
25256 an anonymous type, e.g:
25257 typedef struct {...} foo;
25258 In that case TREE_TYPE (decl) is not a typedef variant
25259 type and TYPE_NAME of the anonymous type is set to the
25260 TYPE_DECL of the typedef. This construct is emitted by
25261 the C++ FE.
25262
25263 TYPE is the anonymous struct named by the typedef
25264 DECL. As we need the DW_AT_type attribute of the
25265 DW_TAG_typedef to point to the DIE of TYPE, let's
25266 generate that DIE right away. add_type_attribute
25267 called below will then pick (via lookup_type_die) that
25268 anonymous struct DIE. */
25269 if (!TREE_ASM_WRITTEN (type))
25270 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25271
25272 /* This is a GNU Extension. We are adding a
25273 DW_AT_linkage_name attribute to the DIE of the
25274 anonymous struct TYPE. The value of that attribute
25275 is the name of the typedef decl naming the anonymous
25276 struct. This greatly eases the work of consumers of
25277 this debug info. */
25278 add_linkage_name_raw (lookup_type_die (type), decl);
25279 }
25280 }
25281
25282 add_type_attribute (type_die, type, decl_quals (decl), false,
25283 context_die);
25284
25285 if (is_naming_typedef_decl (decl))
25286 /* We want that all subsequent calls to lookup_type_die with
25287 TYPE in argument yield the DW_TAG_typedef we have just
25288 created. */
25289 equate_type_number_to_die (type, type_die);
25290
25291 add_alignment_attribute (type_die, TREE_TYPE (decl));
25292
25293 add_accessibility_attribute (type_die, decl);
25294
25295 if (DECL_ABSTRACT_P (decl))
25296 equate_decl_number_to_die (decl, type_die);
25297
25298 if (get_AT (type_die, DW_AT_name))
25299 add_pubtype (decl, type_die);
25300 }
25301
25302 /* Generate a DIE for a struct, class, enum or union type. */
25303
25304 static void
25305 gen_tagged_type_die (tree type,
25306 dw_die_ref context_die,
25307 enum debug_info_usage usage)
25308 {
25309 if (type == NULL_TREE
25310 || !is_tagged_type (type))
25311 return;
25312
25313 if (TREE_ASM_WRITTEN (type))
25314 ;
25315 /* If this is a nested type whose containing class hasn't been written
25316 out yet, writing it out will cover this one, too. This does not apply
25317 to instantiations of member class templates; they need to be added to
25318 the containing class as they are generated. FIXME: This hurts the
25319 idea of combining type decls from multiple TUs, since we can't predict
25320 what set of template instantiations we'll get. */
25321 else if (TYPE_CONTEXT (type)
25322 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25323 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25324 {
25325 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25326
25327 if (TREE_ASM_WRITTEN (type))
25328 return;
25329
25330 /* If that failed, attach ourselves to the stub. */
25331 context_die = lookup_type_die (TYPE_CONTEXT (type));
25332 }
25333 else if (TYPE_CONTEXT (type) != NULL_TREE
25334 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25335 {
25336 /* If this type is local to a function that hasn't been written
25337 out yet, use a NULL context for now; it will be fixed up in
25338 decls_for_scope. */
25339 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25340 /* A declaration DIE doesn't count; nested types need to go in the
25341 specification. */
25342 if (context_die && is_declaration_die (context_die))
25343 context_die = NULL;
25344 }
25345 else
25346 context_die = declare_in_namespace (type, context_die);
25347
25348 if (TREE_CODE (type) == ENUMERAL_TYPE)
25349 {
25350 /* This might have been written out by the call to
25351 declare_in_namespace. */
25352 if (!TREE_ASM_WRITTEN (type))
25353 gen_enumeration_type_die (type, context_die);
25354 }
25355 else
25356 gen_struct_or_union_type_die (type, context_die, usage);
25357
25358 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25359 it up if it is ever completed. gen_*_type_die will set it for us
25360 when appropriate. */
25361 }
25362
25363 /* Generate a type description DIE. */
25364
25365 static void
25366 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25367 enum debug_info_usage usage)
25368 {
25369 struct array_descr_info info;
25370
25371 if (type == NULL_TREE || type == error_mark_node)
25372 return;
25373
25374 if (flag_checking && type)
25375 verify_type (type);
25376
25377 if (TYPE_NAME (type) != NULL_TREE
25378 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25379 && is_redundant_typedef (TYPE_NAME (type))
25380 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25381 /* The DECL of this type is a typedef we don't want to emit debug
25382 info for but we want debug info for its underlying typedef.
25383 This can happen for e.g, the injected-class-name of a C++
25384 type. */
25385 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25386
25387 /* If TYPE is a typedef type variant, let's generate debug info
25388 for the parent typedef which TYPE is a type of. */
25389 if (typedef_variant_p (type))
25390 {
25391 if (TREE_ASM_WRITTEN (type))
25392 return;
25393
25394 tree name = TYPE_NAME (type);
25395 tree origin = decl_ultimate_origin (name);
25396 if (origin != NULL && origin != name)
25397 {
25398 gen_decl_die (origin, NULL, NULL, context_die);
25399 return;
25400 }
25401
25402 /* Prevent broken recursion; we can't hand off to the same type. */
25403 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25404
25405 /* Give typedefs the right scope. */
25406 context_die = scope_die_for (type, context_die);
25407
25408 TREE_ASM_WRITTEN (type) = 1;
25409
25410 gen_decl_die (name, NULL, NULL, context_die);
25411 return;
25412 }
25413
25414 /* If type is an anonymous tagged type named by a typedef, let's
25415 generate debug info for the typedef. */
25416 if (is_naming_typedef_decl (TYPE_NAME (type)))
25417 {
25418 /* Give typedefs the right scope. */
25419 context_die = scope_die_for (type, context_die);
25420
25421 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25422 return;
25423 }
25424
25425 if (lang_hooks.types.get_debug_type)
25426 {
25427 tree debug_type = lang_hooks.types.get_debug_type (type);
25428
25429 if (debug_type != NULL_TREE && debug_type != type)
25430 {
25431 gen_type_die_with_usage (debug_type, context_die, usage);
25432 return;
25433 }
25434 }
25435
25436 /* We are going to output a DIE to represent the unqualified version
25437 of this type (i.e. without any const or volatile qualifiers) so
25438 get the main variant (i.e. the unqualified version) of this type
25439 now. (Vectors and arrays are special because the debugging info is in the
25440 cloned type itself. Similarly function/method types can contain extra
25441 ref-qualification). */
25442 if (TREE_CODE (type) == FUNCTION_TYPE
25443 || TREE_CODE (type) == METHOD_TYPE)
25444 {
25445 /* For function/method types, can't use type_main_variant here,
25446 because that can have different ref-qualifiers for C++,
25447 but try to canonicalize. */
25448 tree main = TYPE_MAIN_VARIANT (type);
25449 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25450 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25451 && check_base_type (t, main)
25452 && check_lang_type (t, type))
25453 {
25454 type = t;
25455 break;
25456 }
25457 }
25458 else if (TREE_CODE (type) != VECTOR_TYPE
25459 && TREE_CODE (type) != ARRAY_TYPE)
25460 type = type_main_variant (type);
25461
25462 /* If this is an array type with hidden descriptor, handle it first. */
25463 if (!TREE_ASM_WRITTEN (type)
25464 && lang_hooks.types.get_array_descr_info)
25465 {
25466 memset (&info, 0, sizeof (info));
25467 if (lang_hooks.types.get_array_descr_info (type, &info))
25468 {
25469 /* Fortran sometimes emits array types with no dimension. */
25470 gcc_assert (info.ndimensions >= 0
25471 && (info.ndimensions
25472 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25473 gen_descr_array_type_die (type, &info, context_die);
25474 TREE_ASM_WRITTEN (type) = 1;
25475 return;
25476 }
25477 }
25478
25479 if (TREE_ASM_WRITTEN (type))
25480 {
25481 /* Variable-length types may be incomplete even if
25482 TREE_ASM_WRITTEN. For such types, fall through to
25483 gen_array_type_die() and possibly fill in
25484 DW_AT_{upper,lower}_bound attributes. */
25485 if ((TREE_CODE (type) != ARRAY_TYPE
25486 && TREE_CODE (type) != RECORD_TYPE
25487 && TREE_CODE (type) != UNION_TYPE
25488 && TREE_CODE (type) != QUAL_UNION_TYPE)
25489 || !variably_modified_type_p (type, NULL))
25490 return;
25491 }
25492
25493 switch (TREE_CODE (type))
25494 {
25495 case ERROR_MARK:
25496 break;
25497
25498 case POINTER_TYPE:
25499 case REFERENCE_TYPE:
25500 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25501 ensures that the gen_type_die recursion will terminate even if the
25502 type is recursive. Recursive types are possible in Ada. */
25503 /* ??? We could perhaps do this for all types before the switch
25504 statement. */
25505 TREE_ASM_WRITTEN (type) = 1;
25506
25507 /* For these types, all that is required is that we output a DIE (or a
25508 set of DIEs) to represent the "basis" type. */
25509 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25510 DINFO_USAGE_IND_USE);
25511 break;
25512
25513 case OFFSET_TYPE:
25514 /* This code is used for C++ pointer-to-data-member types.
25515 Output a description of the relevant class type. */
25516 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25517 DINFO_USAGE_IND_USE);
25518
25519 /* Output a description of the type of the object pointed to. */
25520 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25521 DINFO_USAGE_IND_USE);
25522
25523 /* Now output a DIE to represent this pointer-to-data-member type
25524 itself. */
25525 gen_ptr_to_mbr_type_die (type, context_die);
25526 break;
25527
25528 case FUNCTION_TYPE:
25529 /* Force out return type (in case it wasn't forced out already). */
25530 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25531 DINFO_USAGE_DIR_USE);
25532 gen_subroutine_type_die (type, context_die);
25533 break;
25534
25535 case METHOD_TYPE:
25536 /* Force out return type (in case it wasn't forced out already). */
25537 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25538 DINFO_USAGE_DIR_USE);
25539 gen_subroutine_type_die (type, context_die);
25540 break;
25541
25542 case ARRAY_TYPE:
25543 case VECTOR_TYPE:
25544 gen_array_type_die (type, context_die);
25545 break;
25546
25547 case ENUMERAL_TYPE:
25548 case RECORD_TYPE:
25549 case UNION_TYPE:
25550 case QUAL_UNION_TYPE:
25551 gen_tagged_type_die (type, context_die, usage);
25552 return;
25553
25554 case VOID_TYPE:
25555 case INTEGER_TYPE:
25556 case REAL_TYPE:
25557 case FIXED_POINT_TYPE:
25558 case COMPLEX_TYPE:
25559 case BOOLEAN_TYPE:
25560 /* No DIEs needed for fundamental types. */
25561 break;
25562
25563 case NULLPTR_TYPE:
25564 case LANG_TYPE:
25565 /* Just use DW_TAG_unspecified_type. */
25566 {
25567 dw_die_ref type_die = lookup_type_die (type);
25568 if (type_die == NULL)
25569 {
25570 tree name = TYPE_IDENTIFIER (type);
25571 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25572 type);
25573 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25574 equate_type_number_to_die (type, type_die);
25575 }
25576 }
25577 break;
25578
25579 default:
25580 if (is_cxx_auto (type))
25581 {
25582 tree name = TYPE_IDENTIFIER (type);
25583 dw_die_ref *die = (name == get_identifier ("auto")
25584 ? &auto_die : &decltype_auto_die);
25585 if (!*die)
25586 {
25587 *die = new_die (DW_TAG_unspecified_type,
25588 comp_unit_die (), NULL_TREE);
25589 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25590 }
25591 equate_type_number_to_die (type, *die);
25592 break;
25593 }
25594 gcc_unreachable ();
25595 }
25596
25597 TREE_ASM_WRITTEN (type) = 1;
25598 }
25599
25600 static void
25601 gen_type_die (tree type, dw_die_ref context_die)
25602 {
25603 if (type != error_mark_node)
25604 {
25605 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25606 if (flag_checking)
25607 {
25608 dw_die_ref die = lookup_type_die (type);
25609 if (die)
25610 check_die (die);
25611 }
25612 }
25613 }
25614
25615 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25616 things which are local to the given block. */
25617
25618 static void
25619 gen_block_die (tree stmt, dw_die_ref context_die)
25620 {
25621 int must_output_die = 0;
25622 bool inlined_func;
25623
25624 /* Ignore blocks that are NULL. */
25625 if (stmt == NULL_TREE)
25626 return;
25627
25628 inlined_func = inlined_function_outer_scope_p (stmt);
25629
25630 /* If the block is one fragment of a non-contiguous block, do not
25631 process the variables, since they will have been done by the
25632 origin block. Do process subblocks. */
25633 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25634 {
25635 tree sub;
25636
25637 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25638 gen_block_die (sub, context_die);
25639
25640 return;
25641 }
25642
25643 /* Determine if we need to output any Dwarf DIEs at all to represent this
25644 block. */
25645 if (inlined_func)
25646 /* The outer scopes for inlinings *must* always be represented. We
25647 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25648 must_output_die = 1;
25649 else if (BLOCK_DIE (stmt))
25650 /* If we already have a DIE then it was filled early. Meanwhile
25651 we might have pruned all BLOCK_VARS as optimized out but we
25652 still want to generate high/low PC attributes so output it. */
25653 must_output_die = 1;
25654 else if (TREE_USED (stmt)
25655 || TREE_ASM_WRITTEN (stmt)
25656 || BLOCK_ABSTRACT (stmt))
25657 {
25658 /* Determine if this block directly contains any "significant"
25659 local declarations which we will need to output DIEs for. */
25660 if (debug_info_level > DINFO_LEVEL_TERSE)
25661 {
25662 /* We are not in terse mode so any local declaration that
25663 is not ignored for debug purposes counts as being a
25664 "significant" one. */
25665 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25666 must_output_die = 1;
25667 else
25668 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25669 if (!DECL_IGNORED_P (var))
25670 {
25671 must_output_die = 1;
25672 break;
25673 }
25674 }
25675 else if (!dwarf2out_ignore_block (stmt))
25676 must_output_die = 1;
25677 }
25678
25679 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25680 DIE for any block which contains no significant local declarations at
25681 all. Rather, in such cases we just call `decls_for_scope' so that any
25682 needed Dwarf info for any sub-blocks will get properly generated. Note
25683 that in terse mode, our definition of what constitutes a "significant"
25684 local declaration gets restricted to include only inlined function
25685 instances and local (nested) function definitions. */
25686 if (must_output_die)
25687 {
25688 if (inlined_func)
25689 {
25690 /* If STMT block is abstract, that means we have been called
25691 indirectly from dwarf2out_abstract_function.
25692 That function rightfully marks the descendant blocks (of
25693 the abstract function it is dealing with) as being abstract,
25694 precisely to prevent us from emitting any
25695 DW_TAG_inlined_subroutine DIE as a descendant
25696 of an abstract function instance. So in that case, we should
25697 not call gen_inlined_subroutine_die.
25698
25699 Later though, when cgraph asks dwarf2out to emit info
25700 for the concrete instance of the function decl into which
25701 the concrete instance of STMT got inlined, the latter will lead
25702 to the generation of a DW_TAG_inlined_subroutine DIE. */
25703 if (! BLOCK_ABSTRACT (stmt))
25704 gen_inlined_subroutine_die (stmt, context_die);
25705 }
25706 else
25707 gen_lexical_block_die (stmt, context_die);
25708 }
25709 else
25710 decls_for_scope (stmt, context_die);
25711 }
25712
25713 /* Process variable DECL (or variable with origin ORIGIN) within
25714 block STMT and add it to CONTEXT_DIE. */
25715 static void
25716 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25717 {
25718 dw_die_ref die;
25719 tree decl_or_origin = decl ? decl : origin;
25720
25721 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25722 die = lookup_decl_die (decl_or_origin);
25723 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25724 {
25725 if (TYPE_DECL_IS_STUB (decl_or_origin))
25726 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25727 else
25728 die = lookup_decl_die (decl_or_origin);
25729 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25730 if (! die && ! early_dwarf)
25731 return;
25732 }
25733 else
25734 die = NULL;
25735
25736 /* Avoid creating DIEs for local typedefs and concrete static variables that
25737 will only be pruned later. */
25738 if ((origin || decl_ultimate_origin (decl))
25739 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25740 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25741 {
25742 origin = decl_ultimate_origin (decl_or_origin);
25743 if (decl && VAR_P (decl) && die != NULL)
25744 {
25745 die = lookup_decl_die (origin);
25746 if (die != NULL)
25747 equate_decl_number_to_die (decl, die);
25748 }
25749 return;
25750 }
25751
25752 if (die != NULL && die->die_parent == NULL)
25753 add_child_die (context_die, die);
25754 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25755 {
25756 if (early_dwarf)
25757 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25758 stmt, context_die);
25759 }
25760 else
25761 {
25762 if (decl && DECL_P (decl))
25763 {
25764 die = lookup_decl_die (decl);
25765
25766 /* Early created DIEs do not have a parent as the decls refer
25767 to the function as DECL_CONTEXT rather than the BLOCK. */
25768 if (die && die->die_parent == NULL)
25769 {
25770 gcc_assert (in_lto_p);
25771 add_child_die (context_die, die);
25772 }
25773 }
25774
25775 gen_decl_die (decl, origin, NULL, context_die);
25776 }
25777 }
25778
25779 /* Generate all of the decls declared within a given scope and (recursively)
25780 all of its sub-blocks. */
25781
25782 static void
25783 decls_for_scope (tree stmt, dw_die_ref context_die)
25784 {
25785 tree decl;
25786 unsigned int i;
25787 tree subblocks;
25788
25789 /* Ignore NULL blocks. */
25790 if (stmt == NULL_TREE)
25791 return;
25792
25793 /* Output the DIEs to represent all of the data objects and typedefs
25794 declared directly within this block but not within any nested
25795 sub-blocks. Also, nested function and tag DIEs have been
25796 generated with a parent of NULL; fix that up now. We don't
25797 have to do this if we're at -g1. */
25798 if (debug_info_level > DINFO_LEVEL_TERSE)
25799 {
25800 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25801 process_scope_var (stmt, decl, NULL_TREE, context_die);
25802 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25803 origin - avoid doing this twice as we have no good way to see
25804 if we've done it once already. */
25805 if (! early_dwarf)
25806 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25807 {
25808 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25809 if (decl == current_function_decl)
25810 /* Ignore declarations of the current function: although they
25811 are declarations, gen_subprogram_die would treat them
25812 as definitions again because they are equal to
25813 current_function_decl, and would endlessly recurse. */;
25814 else if (TREE_CODE (decl) == FUNCTION_DECL)
25815 process_scope_var (stmt, decl, NULL_TREE, context_die);
25816 else
25817 process_scope_var (stmt, NULL_TREE, decl, context_die);
25818 }
25819 }
25820
25821 /* Even if we're at -g1, we need to process the subblocks in order to get
25822 inlined call information. */
25823
25824 /* Output the DIEs to represent all sub-blocks (and the items declared
25825 therein) of this block. */
25826 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25827 subblocks != NULL;
25828 subblocks = BLOCK_CHAIN (subblocks))
25829 gen_block_die (subblocks, context_die);
25830 }
25831
25832 /* Is this a typedef we can avoid emitting? */
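/* For instance, the artificial member typedef a C++ class gets for its own
name is redundant; see the DECL_ARTIFICIAL check below. */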
25833
25834 bool
25835 is_redundant_typedef (const_tree decl)
25836 {
25837 if (TYPE_DECL_IS_STUB (decl))
25838 return true;
25839
25840 if (DECL_ARTIFICIAL (decl)
25841 && DECL_CONTEXT (decl)
25842 && is_tagged_type (DECL_CONTEXT (decl))
25843 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25844 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25845 /* Also ignore the artificial member typedef for the class name. */
25846 return true;
25847
25848 return false;
25849 }
25850
25851 /* Return TRUE if TYPE is a typedef that names a type for linkage
25852 purposes. This kind of typedef is produced by the C++ FE for
25853 constructs like:
25854
25855 typedef struct {...} foo;
25856
25857 In that case, there is no typedef variant type produced for foo.
25858 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25859 struct type. */
25860
25861 static bool
25862 is_naming_typedef_decl (const_tree decl)
25863 {
25864 if (decl == NULL_TREE
25865 || TREE_CODE (decl) != TYPE_DECL
25866 || DECL_NAMELESS (decl)
25867 || !is_tagged_type (TREE_TYPE (decl))
25868 || DECL_IS_BUILTIN (decl)
25869 || is_redundant_typedef (decl)
25870 /* It looks like Ada produces TYPE_DECLs that are very similar
25871 to C++ naming typedefs but that have different
25872 semantics. Let's be specific to C++ for now. */
25873 || !is_cxx (decl))
25874 return FALSE;
25875
25876 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25877 && TYPE_NAME (TREE_TYPE (decl)) == decl
25878 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25879 != TYPE_NAME (TREE_TYPE (decl))));
25880 }
25881
25882 /* Look up the DIE for a context; return NULL if it does not exist yet. */
25883
25884 static inline dw_die_ref
25885 lookup_context_die (tree context)
25886 {
25887 if (context)
25888 {
25889 /* Find die that represents this context. */
25890 if (TYPE_P (context))
25891 {
25892 context = TYPE_MAIN_VARIANT (context);
25893 dw_die_ref ctx = lookup_type_die (context);
25894 if (!ctx)
25895 return NULL;
25896 return strip_naming_typedef (context, ctx);
25897 }
25898 else
25899 return lookup_decl_die (context);
25900 }
25901 return comp_unit_die ();
25902 }
25903
25904 /* Return the DIE for a context, creating it if necessary. */
25905
25906 static inline dw_die_ref
25907 get_context_die (tree context)
25908 {
25909 if (context)
25910 {
25911 /* Find die that represents this context. */
25912 if (TYPE_P (context))
25913 {
25914 context = TYPE_MAIN_VARIANT (context);
25915 return strip_naming_typedef (context, force_type_die (context));
25916 }
25917 else
25918 return force_decl_die (context);
25919 }
25920 return comp_unit_die ();
25921 }
25922
25923 /* Returns the DIE for decl. A DIE will always be returned. */
25924
25925 static dw_die_ref
25926 force_decl_die (tree decl)
25927 {
25928 dw_die_ref decl_die;
25929 unsigned saved_external_flag;
25930 tree save_fn = NULL_TREE;
25931 decl_die = lookup_decl_die (decl);
25932 if (!decl_die)
25933 {
25934 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25935
25936 decl_die = lookup_decl_die (decl);
25937 if (decl_die)
25938 return decl_die;
25939
25940 switch (TREE_CODE (decl))
25941 {
25942 case FUNCTION_DECL:
25943 /* Clear current_function_decl, so that gen_subprogram_die thinks
25944 that this is a declaration. At this point, we just want to force
25945 a declaration DIE. */
25946 save_fn = current_function_decl;
25947 current_function_decl = NULL_TREE;
25948 gen_subprogram_die (decl, context_die);
25949 current_function_decl = save_fn;
25950 break;
25951
25952 case VAR_DECL:
25953 /* Set the external flag to force a declaration DIE. Restore it after
25954 the gen_decl_die call. */
25955 saved_external_flag = DECL_EXTERNAL (decl);
25956 DECL_EXTERNAL (decl) = 1;
25957 gen_decl_die (decl, NULL, NULL, context_die);
25958 DECL_EXTERNAL (decl) = saved_external_flag;
25959 break;
25960
25961 case NAMESPACE_DECL:
25962 if (dwarf_version >= 3 || !dwarf_strict)
25963 dwarf2out_decl (decl);
25964 else
25965 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25966 decl_die = comp_unit_die ();
25967 break;
25968
25969 case TRANSLATION_UNIT_DECL:
25970 decl_die = comp_unit_die ();
25971 break;
25972
25973 default:
25974 gcc_unreachable ();
25975 }
25976
25977 /* We should be able to find the DIE now. */
25978 if (!decl_die)
25979 decl_die = lookup_decl_die (decl);
25980 gcc_assert (decl_die);
25981 }
25982
25983 return decl_die;
25984 }
25985
25986 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
25987 always returned. */
25988
25989 static dw_die_ref
25990 force_type_die (tree type)
25991 {
25992 dw_die_ref type_die;
25993
25994 type_die = lookup_type_die (type);
25995 if (!type_die)
25996 {
25997 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25998
25999 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26000 false, context_die);
26001 gcc_assert (type_die);
26002 }
26003 return type_die;
26004 }
26005
26006 /* Force out any required namespaces to be able to output DECL,
26007 and return the new context_die for it, if it's changed. */
26008
26009 static dw_die_ref
26010 setup_namespace_context (tree thing, dw_die_ref context_die)
26011 {
26012 tree context = (DECL_P (thing)
26013 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26014 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26015 /* Force out the namespace. */
26016 context_die = force_decl_die (context);
26017
26018 return context_die;
26019 }
26020
26021 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26022 type) within its namespace, if appropriate.
26023
26024 For compatibility with older debuggers, namespace DIEs only contain
26025 declarations; all definitions are emitted at CU scope, with
26026 DW_AT_specification pointing to the declaration (like with class
26027 members). */
26028
26029 static dw_die_ref
26030 declare_in_namespace (tree thing, dw_die_ref context_die)
26031 {
26032 dw_die_ref ns_context;
26033
26034 if (debug_info_level <= DINFO_LEVEL_TERSE)
26035 return context_die;
26036
26037 /* External declarations in the local scope only need to be emitted
26038 once, not once in the namespace and once in the scope.
26039
26040 This avoids declaring the `extern' below in the
26041 namespace DIE as well as in the innermost scope:
26042
26043 namespace S
26044 {
26045 int i=5;
26046 int foo()
26047 {
26048 int i=8;
26049 extern int i;
26050 return i;
26051 }
26052 }
26053 */
26054 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26055 return context_die;
26056
26057 /* If this decl is from an inlined function, then don't try to emit it in its
26058 namespace, as we will get confused. It would have already been emitted
26059 when the abstract instance of the inline function was emitted anyway. */
26060 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26061 return context_die;
26062
26063 ns_context = setup_namespace_context (thing, context_die);
26064
26065 if (ns_context != context_die)
26066 {
26067 if (is_fortran ())
26068 return ns_context;
26069 if (DECL_P (thing))
26070 gen_decl_die (thing, NULL, NULL, ns_context);
26071 else
26072 gen_type_die (thing, ns_context);
26073 }
26074 return context_die;
26075 }
26076
26077 /* Generate a DIE for a namespace or namespace alias. */
26078
26079 static void
26080 gen_namespace_die (tree decl, dw_die_ref context_die)
26081 {
26082 dw_die_ref namespace_die;
26083
26084 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26085 they are an alias of. */
26086 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26087 {
26088 /* Output a real namespace or module. */
26089 context_die = setup_namespace_context (decl, comp_unit_die ());
26090 namespace_die = new_die (is_fortran ()
26091 ? DW_TAG_module : DW_TAG_namespace,
26092 context_die, decl);
26093 /* For Fortran modules defined in a different CU, don't add src coords. */
26094 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26095 {
26096 const char *name = dwarf2_name (decl, 0);
26097 if (name)
26098 add_name_attribute (namespace_die, name);
26099 }
26100 else
26101 add_name_and_src_coords_attributes (namespace_die, decl);
26102 if (DECL_EXTERNAL (decl))
26103 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26104 equate_decl_number_to_die (decl, namespace_die);
26105 }
26106 else
26107 {
26108 /* Output a namespace alias. */
26109
26110 /* Force out the namespace we are an alias of, if necessary. */
26111 dw_die_ref origin_die
26112 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26113
26114 if (DECL_FILE_SCOPE_P (decl)
26115 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26116 context_die = setup_namespace_context (decl, comp_unit_die ());
26117 /* Now create the namespace alias DIE. */
26118 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26119 add_name_and_src_coords_attributes (namespace_die, decl);
26120 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26121 equate_decl_number_to_die (decl, namespace_die);
26122 }
26123 if ((dwarf_version >= 5 || !dwarf_strict)
26124 && lang_hooks.decls.decl_dwarf_attribute (decl,
26125 DW_AT_export_symbols) == 1)
26126 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26127
26128 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26129 if (want_pubnames ())
26130 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26131 }
26132
26133 /* Generate Dwarf debug information for a decl described by DECL.
26134 The return value is currently only meaningful for PARM_DECLs,
26135 for all other decls it returns NULL.
26136
26137 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26138 It can be NULL otherwise. */
26139
26140 static dw_die_ref
26141 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26142 dw_die_ref context_die)
26143 {
26144 tree decl_or_origin = decl ? decl : origin;
26145 tree class_origin = NULL, ultimate_origin;
26146
26147 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26148 return NULL;
26149
26150 switch (TREE_CODE (decl_or_origin))
26151 {
26152 case ERROR_MARK:
26153 break;
26154
26155 case CONST_DECL:
26156 if (!is_fortran () && !is_ada ())
26157 {
26158 /* The individual enumerators of an enum type get output when we output
26159 the Dwarf representation of the relevant enum type itself. */
26160 break;
26161 }
26162
26163 /* Emit its type. */
26164 gen_type_die (TREE_TYPE (decl), context_die);
26165
26166 /* And its containing namespace. */
26167 context_die = declare_in_namespace (decl, context_die);
26168
26169 gen_const_die (decl, context_die);
26170 break;
26171
26172 case FUNCTION_DECL:
26173 #if 0
26174 /* FIXME */
26175 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26176 on local redeclarations of global functions. That seems broken. */
26177 if (current_function_decl != decl)
26178 /* This is only a declaration. */;
26179 #endif
26180
26181 /* We should have abstract copies already and should not generate
26182 stray type DIEs in late LTO dumping. */
26183 if (! early_dwarf)
26184 ;
26185
26186 /* If we're emitting a clone, emit info for the abstract instance. */
26187 else if (origin || DECL_ORIGIN (decl) != decl)
26188 dwarf2out_abstract_function (origin
26189 ? DECL_ORIGIN (origin)
26190 : DECL_ABSTRACT_ORIGIN (decl));
26191
26192 /* If we're emitting a possibly inlined function emit it as
26193 abstract instance. */
26194 else if (cgraph_function_possibly_inlined_p (decl)
26195 && ! DECL_ABSTRACT_P (decl)
26196 && ! class_or_namespace_scope_p (context_die)
26197 /* dwarf2out_abstract_function won't emit a die if this is just
26198 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26199 that case, because that works only if we have a die. */
26200 && DECL_INITIAL (decl) != NULL_TREE)
26201 dwarf2out_abstract_function (decl);
26202
26203 /* Otherwise we're emitting the primary DIE for this decl. */
26204 else if (debug_info_level > DINFO_LEVEL_TERSE)
26205 {
26206 /* Before we describe the FUNCTION_DECL itself, make sure that we
26207 have its containing type. */
26208 if (!origin)
26209 origin = decl_class_context (decl);
26210 if (origin != NULL_TREE)
26211 gen_type_die (origin, context_die);
26212
26213 /* And its return type. */
26214 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26215
26216 /* And its virtual context. */
26217 if (DECL_VINDEX (decl) != NULL_TREE)
26218 gen_type_die (DECL_CONTEXT (decl), context_die);
26219
26220 /* Make sure we have a member DIE for decl. */
26221 if (origin != NULL_TREE)
26222 gen_type_die_for_member (origin, decl, context_die);
26223
26224 /* And its containing namespace. */
26225 context_die = declare_in_namespace (decl, context_die);
26226 }
26227
26228 /* Now output a DIE to represent the function itself. */
26229 if (decl)
26230 gen_subprogram_die (decl, context_die);
26231 break;
26232
26233 case TYPE_DECL:
26234 /* If we are in terse mode, don't generate any DIEs to represent any
26235 actual typedefs. */
26236 if (debug_info_level <= DINFO_LEVEL_TERSE)
26237 break;
26238
26239 /* In the special case of a TYPE_DECL node representing the declaration
26240 of some type tag, if the given TYPE_DECL is marked as having been
26241 instantiated from some other (original) TYPE_DECL node (e.g. one which
26242 was generated within the original definition of an inline function) we
26243 used to generate a special (abbreviated) DW_TAG_structure_type,
26244 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26245 should be actually referencing those DIEs, as variable DIEs with that
26246 type would be emitted already in the abstract origin, so it was always
26247 removed during unused type pruning. Don't add anything in this
26248 case. */
26249 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26250 break;
26251
26252 if (is_redundant_typedef (decl))
26253 gen_type_die (TREE_TYPE (decl), context_die);
26254 else
26255 /* Output a DIE to represent the typedef itself. */
26256 gen_typedef_die (decl, context_die);
26257 break;
26258
26259 case LABEL_DECL:
26260 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26261 gen_label_die (decl, context_die);
26262 break;
26263
26264 case VAR_DECL:
26265 case RESULT_DECL:
26266 /* If we are in terse mode, don't generate any DIEs to represent any
26267 variable declarations or definitions. */
26268 if (debug_info_level <= DINFO_LEVEL_TERSE)
26269 break;
26270
26271 /* Avoid generating stray type DIEs during late dwarf dumping.
26272 All types have been dumped early. */
26273 if (early_dwarf
26274 /* ??? But in LTRANS we cannot annotate early created variably
26275 modified type DIEs without copying them and adjusting all
26276 references to them. Dump them again as happens for inlining
26277 which copies both the decl and the types. */
26278 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26279 in VLA bound information for example. */
26280 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26281 current_function_decl)))
26282 {
26283 /* Output any DIEs that are needed to specify the type of this data
26284 object. */
26285 if (decl_by_reference_p (decl_or_origin))
26286 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26287 else
26288 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26289 }
26290
26291 if (early_dwarf)
26292 {
26293 /* And its containing type. */
26294 class_origin = decl_class_context (decl_or_origin);
26295 if (class_origin != NULL_TREE)
26296 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26297
26298 /* And its containing namespace. */
26299 context_die = declare_in_namespace (decl_or_origin, context_die);
26300 }
26301
26302 /* Now output the DIE to represent the data object itself. This gets
26303 complicated because of the possibility that the VAR_DECL really
26304 represents an inlined instance of a formal parameter for an inline
26305 function. */
26306 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26307 if (ultimate_origin != NULL_TREE
26308 && TREE_CODE (ultimate_origin) == PARM_DECL)
26309 gen_formal_parameter_die (decl, origin,
26310 true /* Emit name attribute. */,
26311 context_die);
26312 else
26313 gen_variable_die (decl, origin, context_die);
26314 break;
26315
26316 case FIELD_DECL:
26317 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26318 /* Ignore the nameless fields that are used to skip bits but handle C++
26319 anonymous unions and structs. */
26320 if (DECL_NAME (decl) != NULL_TREE
26321 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26322 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26323 {
26324 gen_type_die (member_declared_type (decl), context_die);
26325 gen_field_die (decl, ctx, context_die);
26326 }
26327 break;
26328
26329 case PARM_DECL:
26330 /* Avoid generating stray type DIEs during late dwarf dumping.
26331 All types have been dumped early. */
26332 if (early_dwarf
26333 /* ??? But in LTRANS we cannot annotate early created variably
26334 modified type DIEs without copying them and adjusting all
26335 references to them. Dump them again as happens for inlining
26336 which copies both the decl and the types. */
26337 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26338 in VLA bound information for example. */
26339 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26340 current_function_decl)))
26341 {
26342 if (DECL_BY_REFERENCE (decl_or_origin))
26343 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26344 else
26345 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26346 }
26347 return gen_formal_parameter_die (decl, origin,
26348 true /* Emit name attribute. */,
26349 context_die);
26350
26351 case NAMESPACE_DECL:
26352 if (dwarf_version >= 3 || !dwarf_strict)
26353 gen_namespace_die (decl, context_die);
26354 break;
26355
26356 case IMPORTED_DECL:
26357 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26358 DECL_CONTEXT (decl), context_die);
26359 break;
26360
26361 case NAMELIST_DECL:
26362 gen_namelist_decl (DECL_NAME (decl), context_die,
26363 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26364 break;
26365
26366 default:
26367 /* Probably some frontend-internal decl. Assume we don't care. */
26368 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26369 break;
26370 }
26371
26372 return NULL;
26373 }
26374 \f
26375 /* Output initial debug information for global DECL. Called at the
26376 end of the parsing process.
26377
26378 This is the initial debug generation process. As such, the DIEs
26379 generated may be incomplete. A later debug generation pass
26380 (dwarf2out_late_global_decl) will augment the information generated
26381 in this pass (e.g., with complete location info). */
26382
26383 static void
26384 dwarf2out_early_global_decl (tree decl)
26385 {
26386 set_early_dwarf s;
26387
26388 /* gen_decl_die() will set DECL_ABSTRACT because
26389 cgraph_function_possibly_inlined_p() returns true. This in
26390 turn will cause DW_AT_inline attributes to be set.
26391
26392 This happens because at early dwarf generation, there is no
26393 cgraph information, causing cgraph_function_possibly_inlined_p()
26394 to return true. Trick cgraph_function_possibly_inlined_p()
26395 while we generate dwarf early. */
26396 bool save = symtab->global_info_ready;
26397 symtab->global_info_ready = true;
26398
26399 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26400 other DECLs and they can point to template types or other things
26401 that dwarf2out can't handle when done via dwarf2out_decl. */
26402 if (TREE_CODE (decl) != TYPE_DECL
26403 && TREE_CODE (decl) != PARM_DECL)
26404 {
26405 if (TREE_CODE (decl) == FUNCTION_DECL)
26406 {
26407 tree save_fndecl = current_function_decl;
26408
26409 /* For nested functions, make sure we have DIEs for the parents first
26410 so that all nested DIEs are generated at the proper scope in the
26411 first shot. */
26412 tree context = decl_function_context (decl);
26413 if (context != NULL)
26414 {
26415 dw_die_ref context_die = lookup_decl_die (context);
26416 current_function_decl = context;
26417
26418 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26419 enough so that it lands in its own context. This avoids type
26420 pruning issues later on. */
26421 if (context_die == NULL || is_declaration_die (context_die))
26422 dwarf2out_decl (context);
26423 }
26424
26425 /* Emit an abstract origin of a function first. This happens
26426 with C++ constructor clones, for example, and makes
26427 dwarf2out_abstract_function happy, which requires the early
26428 DIE of the abstract instance to be present. */
26429 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26430 dw_die_ref origin_die;
26431 if (origin != NULL
26432 /* Do not emit the DIE multiple times but make sure to
26433 process it fully here in case we just saw a declaration. */
26434 && ((origin_die = lookup_decl_die (origin)) == NULL
26435 || is_declaration_die (origin_die)))
26436 {
26437 current_function_decl = origin;
26438 dwarf2out_decl (origin);
26439 }
26440
26441 /* Emit the DIE for decl but avoid doing that multiple times. */
26442 dw_die_ref old_die;
26443 if ((old_die = lookup_decl_die (decl)) == NULL
26444 || is_declaration_die (old_die))
26445 {
26446 current_function_decl = decl;
26447 dwarf2out_decl (decl);
26448 }
26449
26450 current_function_decl = save_fndecl;
26451 }
26452 else
26453 dwarf2out_decl (decl);
26454 }
26455 symtab->global_info_ready = save;
26456 }
26457
26458 /* Return whether EXPR is an expression with the following pattern:
26459 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
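/* For instance, the tree built for a dereference of a constant address,
such as *(int *) 0x1234, has this shape. */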
26460
26461 static bool
26462 is_trivial_indirect_ref (tree expr)
26463 {
26464 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26465 return false;
26466
26467 tree nop = TREE_OPERAND (expr, 0);
26468 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26469 return false;
26470
26471 tree int_cst = TREE_OPERAND (nop, 0);
26472 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26473 }
26474
26475 /* Output debug information for global decl DECL. Called from
26476 toplev.c after compilation proper has finished. */
26477
26478 static void
26479 dwarf2out_late_global_decl (tree decl)
26480 {
26481 /* Fill-in any location information we were unable to determine
26482 on the first pass. */
26483 if (VAR_P (decl))
26484 {
26485 dw_die_ref die = lookup_decl_die (decl);
26486
26487 /* We may have to generate early debug late for LTO in case debug
26488 was not enabled at compile-time or the target doesn't support
26489 the LTO early debug scheme. */
26490 if (! die && in_lto_p)
26491 {
26492 dwarf2out_decl (decl);
26493 die = lookup_decl_die (decl);
26494 }
26495
26496 if (die)
26497 {
26498 /* We get called via the symtab code invoking late_global_decl
26499 for symbols that are optimized out.
26500
26501 Do not add locations for those, except if they have a
26502 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26503 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26504 INDIRECT_REF expression, as this could generate relocations to
26505 text symbols in LTO object files, which is invalid. */
26506 varpool_node *node = varpool_node::get (decl);
26507 if ((! node || ! node->definition)
26508 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26509 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26510 tree_add_const_value_attribute_for_decl (die, decl);
26511 else
26512 add_location_or_const_value_attribute (die, decl, false);
26513 }
26514 }
26515 }
26516
26517 /* Output debug information for type decl DECL. Called from toplev.c
26518 and from language front ends (to record built-in types). */
26519 static void
26520 dwarf2out_type_decl (tree decl, int local)
26521 {
26522 if (!local)
26523 {
26524 set_early_dwarf s;
26525 dwarf2out_decl (decl);
26526 }
26527 }
26528
26529 /* Output debug information for imported module or decl DECL.
26530 NAME is non-NULL name in the lexical block if the decl has been renamed.
26531 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26532 that DECL belongs to.
26533 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
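/* A C++ using-directive or using-declaration, or a Fortran USE statement,
typically ends up here. */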
26534 static void
26535 dwarf2out_imported_module_or_decl_1 (tree decl,
26536 tree name,
26537 tree lexical_block,
26538 dw_die_ref lexical_block_die)
26539 {
26540 expanded_location xloc;
26541 dw_die_ref imported_die = NULL;
26542 dw_die_ref at_import_die;
26543
26544 if (TREE_CODE (decl) == IMPORTED_DECL)
26545 {
26546 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26547 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26548 gcc_assert (decl);
26549 }
26550 else
26551 xloc = expand_location (input_location);
26552
26553 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26554 {
26555 at_import_die = force_type_die (TREE_TYPE (decl));
26556 /* For namespace N { typedef void T; } using N::T; base_type_die
26557 returns NULL, but DW_TAG_imported_declaration requires
26558 the DW_AT_import attribute. Force creation of DW_TAG_typedef. */
26559 if (!at_import_die)
26560 {
26561 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26562 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26563 at_import_die = lookup_type_die (TREE_TYPE (decl));
26564 gcc_assert (at_import_die);
26565 }
26566 }
26567 else
26568 {
26569 at_import_die = lookup_decl_die (decl);
26570 if (!at_import_die)
26571 {
26572 /* If we're trying to avoid duplicate debug info, we may not have
26573 emitted the member decl for this field. Emit it now. */
26574 if (TREE_CODE (decl) == FIELD_DECL)
26575 {
26576 tree type = DECL_CONTEXT (decl);
26577
26578 if (TYPE_CONTEXT (type)
26579 && TYPE_P (TYPE_CONTEXT (type))
26580 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26581 DINFO_USAGE_DIR_USE))
26582 return;
26583 gen_type_die_for_member (type, decl,
26584 get_context_die (TYPE_CONTEXT (type)));
26585 }
26586 if (TREE_CODE (decl) == NAMELIST_DECL)
26587 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26588 get_context_die (DECL_CONTEXT (decl)),
26589 NULL_TREE);
26590 else
26591 at_import_die = force_decl_die (decl);
26592 }
26593 }
26594
26595 if (TREE_CODE (decl) == NAMESPACE_DECL)
26596 {
26597 if (dwarf_version >= 3 || !dwarf_strict)
26598 imported_die = new_die (DW_TAG_imported_module,
26599 lexical_block_die,
26600 lexical_block);
26601 else
26602 return;
26603 }
26604 else
26605 imported_die = new_die (DW_TAG_imported_declaration,
26606 lexical_block_die,
26607 lexical_block);
26608
26609 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26610 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26611 if (debug_column_info && xloc.column)
26612 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26613 if (name)
26614 add_AT_string (imported_die, DW_AT_name,
26615 IDENTIFIER_POINTER (name));
26616 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26617 }
26618
26619 /* Output debug information for imported module or decl DECL.
26620 NAME is non-NULL name in context if the decl has been renamed.
26621 CHILD is true if decl is one of the renamed decls as part of
26622 importing whole module.
26623 IMPLICIT is set if this hook is called for an implicit import
26624 such as inline namespace. */
26625
26626 static void
26627 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26628 bool child, bool implicit)
26629 {
26630 /* dw_die_ref at_import_die; */
26631 dw_die_ref scope_die;
26632
26633 if (debug_info_level <= DINFO_LEVEL_TERSE)
26634 return;
26635
26636 gcc_assert (decl);
26637
26638 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26639 should be enough. For DWARF4 and older, even if we emit
26640 DW_AT_export_symbols as an extension, add the implicit
26641 DW_TAG_imported_module anyway for the benefit of consumers unaware of it. */
26642 if (implicit
26643 && dwarf_version >= 5
26644 && lang_hooks.decls.decl_dwarf_attribute (decl,
26645 DW_AT_export_symbols) == 1)
26646 return;
26647
26648 set_early_dwarf s;
26649
26650 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need two
26651 DIEs: the decl DIE for the reference and the scope DIE. First, get the DIE
26652 for the decl itself. */
26653
26654 /* Get the scope die for decl context. Use comp_unit_die for global module
26655 or decl. If a DIE is not found for non-globals, force a new DIE. */
26656 if (context
26657 && TYPE_P (context)
26658 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26659 return;
26660
26661 scope_die = get_context_die (context);
26662
26663 if (child)
26664 {
26665 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26666 there is nothing we can do here. */
26667 if (dwarf_version < 3 && dwarf_strict)
26668 return;
26669
26670 gcc_assert (scope_die->die_child);
26671 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26672 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26673 scope_die = scope_die->die_child;
26674 }
26675
26676 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26677 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26678 }
26679
26680 /* Output debug information for namelists. */
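/* For a Fortran construct such as "namelist /nml/ a, b" this emits a
DW_TAG_namelist DIE with one DW_TAG_namelist_item child per member; see
below. */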
26681
26682 static dw_die_ref
26683 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26684 {
26685 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26686 tree value;
26687 unsigned i;
26688
26689 if (debug_info_level <= DINFO_LEVEL_TERSE)
26690 return NULL;
26691
26692 gcc_assert (scope_die != NULL);
26693 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26694 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26695
26696 /* If there are no item_decls, we have a nondefining namelist, e.g.
26697 with USE association; hence, set DW_AT_declaration. */
26698 if (item_decls == NULL_TREE)
26699 {
26700 add_AT_flag (nml_die, DW_AT_declaration, 1);
26701 return nml_die;
26702 }
26703
26704 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26705 {
26706 nml_item_ref_die = lookup_decl_die (value);
26707 if (!nml_item_ref_die)
26708 nml_item_ref_die = force_decl_die (value);
26709
26710 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26711 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26712 }
26713 return nml_die;
26714 }
26715
26716
26717 /* Write the debugging output for DECL. */
26718
26719 static void
26720 dwarf2out_decl (tree decl)
26721 {
26722 dw_die_ref context_die = comp_unit_die ();
26723
26724 switch (TREE_CODE (decl))
26725 {
26726 case ERROR_MARK:
26727 return;
26728
26729 case FUNCTION_DECL:
26730 /* If we're a nested function, initially use a parent of NULL; if we're
26731 a plain function, this will be fixed up in decls_for_scope. If
26732 we're a method, it will be ignored, since we already have a DIE.
26733 Avoid doing this late though since clones of class methods may
26734 otherwise end up in limbo and create type DIEs late. */
26735 if (early_dwarf
26736 && decl_function_context (decl)
26737 /* But if we're in terse mode, we don't care about scope. */
26738 && debug_info_level > DINFO_LEVEL_TERSE)
26739 context_die = NULL;
26740 break;
26741
26742 case VAR_DECL:
26743 /* For local statics, look up the proper context DIE. */
26744 if (local_function_static (decl))
26745 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26746
26747 /* If we are in terse mode, don't generate any DIEs to represent any
26748 variable declarations or definitions. */
26749 if (debug_info_level <= DINFO_LEVEL_TERSE)
26750 return;
26751 break;
26752
26753 case CONST_DECL:
26754 if (debug_info_level <= DINFO_LEVEL_TERSE)
26755 return;
26756 if (!is_fortran () && !is_ada ())
26757 return;
26758 if (TREE_STATIC (decl) && decl_function_context (decl))
26759 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26760 break;
26761
26762 case NAMESPACE_DECL:
26763 case IMPORTED_DECL:
26764 if (debug_info_level <= DINFO_LEVEL_TERSE)
26765 return;
26766 if (lookup_decl_die (decl) != NULL)
26767 return;
26768 break;
26769
26770 case TYPE_DECL:
26771 /* Don't emit stubs for types unless they are needed by other DIEs. */
26772 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26773 return;
26774
26775 /* Don't bother trying to generate any DIEs to represent any of the
26776 normal built-in types for the language we are compiling. */
26777 if (DECL_IS_BUILTIN (decl))
26778 return;
26779
26780 /* If we are in terse mode, don't generate any DIEs for types. */
26781 if (debug_info_level <= DINFO_LEVEL_TERSE)
26782 return;
26783
26784 /* If we're a function-scope tag, initially use a parent of NULL;
26785 this will be fixed up in decls_for_scope. */
26786 if (decl_function_context (decl))
26787 context_die = NULL;
26788
26789 break;
26790
26791 case NAMELIST_DECL:
26792 break;
26793
26794 default:
26795 return;
26796 }
26797
26798 gen_decl_die (decl, NULL, NULL, context_die);
26799
26800 if (flag_checking)
26801 {
26802 dw_die_ref die = lookup_decl_die (decl);
26803 if (die)
26804 check_die (die);
26805 }
26806 }
26807
26808 /* Write the debugging output for DECL. */
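/* Besides emitting the DIE this also clears the per-function call-site and
variable-location bookkeeping accumulated while scanning the insns. */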
26809
26810 static void
26811 dwarf2out_function_decl (tree decl)
26812 {
26813 dwarf2out_decl (decl);
26814 call_arg_locations = NULL;
26815 call_arg_loc_last = NULL;
26816 call_site_count = -1;
26817 tail_call_site_count = -1;
26818 decl_loc_table->empty ();
26819 cached_dw_loc_list_table->empty ();
26820 }
26821
26822 /* Output a marker (i.e. a label) for the beginning of the generated code for
26823 a lexical block. */
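/* The label is typically of the form .LBB<N>; together with the matching
.LBE<N> label emitted by dwarf2out_end_block it delimits the address range
of the block. */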
26824
26825 static void
26826 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26827 unsigned int blocknum)
26828 {
26829 switch_to_section (current_function_section ());
26830 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26831 }
26832
26833 /* Output a marker (i.e. a label) for the end of the generated code for a
26834 lexical block. */
26835
26836 static void
26837 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26838 {
26839 switch_to_section (current_function_section ());
26840 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26841 }
26842
26843 /* Return true if it is appropriate not to emit any debugging
26844 information for BLOCK, because it doesn't contain any instructions.
26845
26846 Don't allow this for blocks with nested functions or local classes
26847 as we would end up with orphans, and in the presence of scheduling
26848 we may end up calling them anyway. */
26849
26850 static bool
26851 dwarf2out_ignore_block (const_tree block)
26852 {
26853 tree decl;
26854 unsigned int i;
26855
26856 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26857 if (TREE_CODE (decl) == FUNCTION_DECL
26858 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26859 return false;
26860 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26861 {
26862 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26863 if (TREE_CODE (decl) == FUNCTION_DECL
26864 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26865 return false;
26866 }
26867
26868 return true;
26869 }
26870
26871 /* Hash table routines for file_hash. */
26872
26873 bool
26874 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26875 {
26876 return filename_cmp (p1->filename, p2) == 0;
26877 }
26878
26879 hashval_t
26880 dwarf_file_hasher::hash (dwarf_file_data *p)
26881 {
26882 return htab_hash_string (p->filename);
26883 }
26884
26885 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26886 dwarf2out.c) and return its "index". The index of each (known) filename is
26887 just a unique number which is associated with only that one filename. We
26888 need such numbers for the sake of the file table emitted in the .debug_line
26889 section and for references to those file numbers (e.g. in DW_AT_decl_file
26890 attributes and in .debug_macinfo). If the filename given as an argument is not
26891 found in our current list, add it to the list and assign it the next
26892 available unique index number. */
26893
26894 static struct dwarf_file_data *
26895 lookup_filename (const char *file_name)
26896 {
26897 struct dwarf_file_data * created;
26898
26899 if (!file_name)
26900 return NULL;
26901
26902 dwarf_file_data **slot
26903 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26904 INSERT);
26905 if (*slot)
26906 return *slot;
26907
26908 created = ggc_alloc<dwarf_file_data> ();
26909 created->filename = file_name;
26910 created->emitted_number = 0;
26911 *slot = created;
26912 return created;
26913 }
26914
26915 /* If the assembler will construct the file table, then translate the compiler
26916 internal file table number into the assembler file table number, and emit
26917 a .file directive if we haven't already emitted one yet. The file table
26918 numbers are different because we prune debug info for unused variables and
26919 types, which may include filenames. */
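/* For instance, the first reference to a file might emit a directive such
as:  .file 2 "src/foo.c"  */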
26920
26921 static int
26922 maybe_emit_file (struct dwarf_file_data * fd)
26923 {
26924 if (! fd->emitted_number)
26925 {
26926 if (last_emitted_file)
26927 fd->emitted_number = last_emitted_file->emitted_number + 1;
26928 else
26929 fd->emitted_number = 1;
26930 last_emitted_file = fd;
26931
26932 if (output_asm_line_debug_info ())
26933 {
26934 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26935 output_quoted_string (asm_out_file,
26936 remap_debug_filename (fd->filename));
26937 fputc ('\n', asm_out_file);
26938 }
26939 }
26940
26941 return fd->emitted_number;
26942 }
26943
26944 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26945 That generation should happen after function debug info has been
26946 generated. The value of the attribute is the constant value of ARG. */
26947
26948 static void
26949 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26950 {
26951 die_arg_entry entry;
26952
26953 if (!die || !arg)
26954 return;
26955
26956 gcc_assert (early_dwarf);
26957
26958 if (!tmpl_value_parm_die_table)
26959 vec_alloc (tmpl_value_parm_die_table, 32);
26960
26961 entry.die = die;
26962 entry.arg = arg;
26963 vec_safe_push (tmpl_value_parm_die_table, entry);
26964 }
26965
26966 /* Return TRUE if T is an instance of a generic type, FALSE
26967 otherwise. */
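/* E.g. an instantiation of a C++ class template counts as such an
instance. */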
26968
26969 static bool
26970 generic_type_p (tree t)
26971 {
26972 if (t == NULL_TREE || !TYPE_P (t))
26973 return false;
26974 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26975 }
26976
26977 /* Schedule the generation of the generic parameter dies for the
26978 instance of the generic type T. The actual generation is done later
26979 by gen_scheduled_generic_parms_dies. */
26980
26981 static void
26982 schedule_generic_params_dies_gen (tree t)
26983 {
26984 if (!generic_type_p (t))
26985 return;
26986
26987 gcc_assert (early_dwarf);
26988
26989 if (!generic_type_instances)
26990 vec_alloc (generic_type_instances, 256);
26991
26992 vec_safe_push (generic_type_instances, t);
26993 }
26994
26995 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26996 by append_entry_to_tmpl_value_parm_die_table. This function must
26997 be called after function DIEs have been generated. */
26998
26999 static void
27000 gen_remaining_tmpl_value_param_die_attribute (void)
27001 {
27002 if (tmpl_value_parm_die_table)
27003 {
27004 unsigned i, j;
27005 die_arg_entry *e;
27006
27007 /* We do this in two phases - first get the cases we can
27008 handle during early-finish, preserving those we cannot
27009 (containing symbolic constants where we don't yet know
27010 whether we are going to output the referenced symbols).
27011 For those we try again at late-finish. */
27012 j = 0;
27013 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27014 {
27015 if (!e->die->removed
27016 && !tree_add_const_value_attribute (e->die, e->arg))
27017 {
27018 dw_loc_descr_ref loc = NULL;
27019 if (! early_dwarf
27020 && (dwarf_version >= 5 || !dwarf_strict))
27021 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27022 if (loc)
27023 add_AT_loc (e->die, DW_AT_location, loc);
27024 else
27025 (*tmpl_value_parm_die_table)[j++] = *e;
27026 }
27027 }
27028 tmpl_value_parm_die_table->truncate (j);
27029 }
27030 }
27031
27032 /* Generate generic parameters DIEs for instances of generic types
27033 that have been previously scheduled by
27034 schedule_generic_params_dies_gen. This function must be called
27035 after all the types of the CU have been laid out. */
27036
27037 static void
27038 gen_scheduled_generic_parms_dies (void)
27039 {
27040 unsigned i;
27041 tree t;
27042
27043 if (!generic_type_instances)
27044 return;
27045
27046 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27047 if (COMPLETE_TYPE_P (t))
27048 gen_generic_params_dies (t);
27049
27050 generic_type_instances = NULL;
27051 }
27052
27053
27054 /* Replace DW_AT_name for the decl with name. */
27055
27056 static void
27057 dwarf2out_set_name (tree decl, tree name)
27058 {
27059 dw_die_ref die;
27060 dw_attr_node *attr;
27061 const char *dname;
27062
27063 die = TYPE_SYMTAB_DIE (decl);
27064 if (!die)
27065 return;
27066
27067 dname = dwarf2_name (name, 0);
27068 if (!dname)
27069 return;
27070
27071 attr = get_AT (die, DW_AT_name);
27072 if (attr)
27073 {
27074 struct indirect_string_node *node;
27075
27076 node = find_AT_string (dname);
27077 /* Replace the string. */
27078 attr->dw_attr_val.v.val_str = node;
27079 }
27080
27081 else
27082 add_name_attribute (die, dname);
27083 }
27084
27085 /* True if before or during processing of the first function being emitted. */
27086 static bool in_first_function_p = true;
27087 /* True if loc_note during dwarf2out_var_location call might still be
27088 before first real instruction at address equal to .Ltext0. */
27089 static bool maybe_at_text_label_p = true;
27090 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27091 static unsigned int first_loclabel_num_not_at_text_label;
27092
27093 /* Look ahead for a real insn, or for a begin stmt marker. */
27094
27095 static rtx_insn *
27096 dwarf2out_next_real_insn (rtx_insn *loc_note)
27097 {
27098 rtx_insn *next_real = NEXT_INSN (loc_note);
27099
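/* Skip over notes and anything else that is not a real insn. */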
27100 while (next_real)
27101 if (INSN_P (next_real))
27102 break;
27103 else
27104 next_real = NEXT_INSN (next_real);
27105
27106 return next_real;
27107 }
27108
27109 /* Called by the final INSN scan whenever we see a var location. We
27110 use it to drop labels in the right places, and throw the location in
27111 our lookup table. */
27112
27113 static void
27114 dwarf2out_var_location (rtx_insn *loc_note)
27115 {
27116 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27117 struct var_loc_node *newloc;
27118 rtx_insn *next_real, *next_note;
27119 rtx_insn *call_insn = NULL;
27120 static const char *last_label;
27121 static const char *last_postcall_label;
27122 static bool last_in_cold_section_p;
27123 static rtx_insn *expected_next_loc_note;
27124 tree decl;
27125 bool var_loc_p;
27126 var_loc_view view = 0;
27127
27128 if (!NOTE_P (loc_note))
27129 {
27130 if (CALL_P (loc_note))
27131 {
27132 maybe_reset_location_view (loc_note, cur_line_info_table);
27133 call_site_count++;
27134 if (SIBLING_CALL_P (loc_note))
27135 tail_call_site_count++;
27136 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27137 {
27138 call_insn = loc_note;
27139 loc_note = NULL;
27140 var_loc_p = false;
27141
27142 next_real = dwarf2out_next_real_insn (call_insn);
27143 next_note = NULL;
27144 cached_next_real_insn = NULL;
27145 goto create_label;
27146 }
27147 if (optimize == 0 && !flag_var_tracking)
27148 {
27149 /* When the var-tracking pass is not running, there is no note
27150 for indirect calls whose target is compile-time known. In this
27151 case, process such calls specifically so that we generate call
27152 sites for them anyway. */
27153 rtx x = PATTERN (loc_note);
27154 if (GET_CODE (x) == PARALLEL)
27155 x = XVECEXP (x, 0, 0);
27156 if (GET_CODE (x) == SET)
27157 x = SET_SRC (x);
27158 if (GET_CODE (x) == CALL)
27159 x = XEXP (x, 0);
27160 if (!MEM_P (x)
27161 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27162 || !SYMBOL_REF_DECL (XEXP (x, 0))
27163 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27164 != FUNCTION_DECL))
27165 {
27166 call_insn = loc_note;
27167 loc_note = NULL;
27168 var_loc_p = false;
27169
27170 next_real = dwarf2out_next_real_insn (call_insn);
27171 next_note = NULL;
27172 cached_next_real_insn = NULL;
27173 goto create_label;
27174 }
27175 }
27176 }
27177 else if (!debug_variable_location_views)
27178 gcc_unreachable ();
27179 else
27180 maybe_reset_location_view (loc_note, cur_line_info_table);
27181
27182 return;
27183 }
27184
27185 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27186 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27187 return;
27188
27189 /* Optimize processing a large consecutive sequence of location
27190 notes so we don't spend too much time in next_real_insn. If the
27191 next insn is another location note, remember the next_real_insn
27192 calculation for next time. */
27193 next_real = cached_next_real_insn;
27194 if (next_real)
27195 {
27196 if (expected_next_loc_note != loc_note)
27197 next_real = NULL;
27198 }
27199
27200 next_note = NEXT_INSN (loc_note);
27201 if (! next_note
27202 || next_note->deleted ()
27203 || ! NOTE_P (next_note)
27204 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27205 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27206 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27207 next_note = NULL;
27208
27209 if (! next_real)
27210 next_real = dwarf2out_next_real_insn (loc_note);
27211
27212 if (next_note)
27213 {
27214 expected_next_loc_note = next_note;
27215 cached_next_real_insn = next_real;
27216 }
27217 else
27218 cached_next_real_insn = NULL;
27219
27220 /* If there are no instructions which would be affected by this note,
27221 don't do anything. */
27222 if (var_loc_p
27223 && next_real == NULL_RTX
27224 && !NOTE_DURING_CALL_P (loc_note))
27225 return;
27226
27227 create_label:
27228
27229 if (next_real == NULL_RTX)
27230 next_real = get_last_insn ();
27231
27232 /* If there were any real insns between the note we processed last time
27233 and this note (or if it is the first note), clear
27234 last_{,postcall_}label so that they are not reused this time. */
27235 if (last_var_location_insn == NULL_RTX
27236 || last_var_location_insn != next_real
27237 || last_in_cold_section_p != in_cold_section_p)
27238 {
27239 last_label = NULL;
27240 last_postcall_label = NULL;
27241 }
27242
27243 if (var_loc_p)
27244 {
27245 const char *label
27246 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27247 view = cur_line_info_table->view;
27248 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27249 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27250 if (newloc == NULL)
27251 return;
27252 }
27253 else
27254 {
27255 decl = NULL_TREE;
27256 newloc = NULL;
27257 }
27258
27259 /* If there were no real insns between the note we processed last time
27260 and this note, use the label we emitted last time. Otherwise
27261 create a new label and emit it. */
27262 if (last_label == NULL)
27263 {
27264 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27265 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27266 loclabel_num++;
27267 last_label = ggc_strdup (loclabel);
27268 /* See if loclabel might be equal to .Ltext0. If yes,
27269 bump first_loclabel_num_not_at_text_label. */
27270 if (!have_multiple_function_sections
27271 && in_first_function_p
27272 && maybe_at_text_label_p)
27273 {
27274 static rtx_insn *last_start;
27275 rtx_insn *insn;
27276 for (insn = loc_note; insn; insn = previous_insn (insn))
27277 if (insn == last_start)
27278 break;
27279 else if (!NONDEBUG_INSN_P (insn))
27280 continue;
27281 else
27282 {
27283 rtx body = PATTERN (insn);
27284 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27285 continue;
27286 /* Inline asm could occupy zero bytes. */
27287 else if (GET_CODE (body) == ASM_INPUT
27288 || asm_noperands (body) >= 0)
27289 continue;
27290 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27291 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27292 continue;
27293 #endif
27294 else
27295 {
27296 /* Assume insn has non-zero length. */
27297 maybe_at_text_label_p = false;
27298 break;
27299 }
27300 }
27301 if (maybe_at_text_label_p)
27302 {
27303 last_start = loc_note;
27304 first_loclabel_num_not_at_text_label = loclabel_num;
27305 }
27306 }
27307 }
27308
27309 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27310 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27311
27312 if (!var_loc_p)
27313 {
27314 struct call_arg_loc_node *ca_loc
27315 = ggc_cleared_alloc<call_arg_loc_node> ();
27316 rtx_insn *prev = call_insn;
27317
27318 ca_loc->call_arg_loc_note
27319 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27320 ca_loc->next = NULL;
27321 ca_loc->label = last_label;
27322 gcc_assert (prev
27323 && (CALL_P (prev)
27324 || (NONJUMP_INSN_P (prev)
27325 && GET_CODE (PATTERN (prev)) == SEQUENCE
27326 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27327 if (!CALL_P (prev))
27328 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27329 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27330
27331 /* Look for a SYMBOL_REF in the "prev" instruction. */
27332 rtx x = get_call_rtx_from (PATTERN (prev));
27333 if (x)
27334 {
27335 /* Try to get the call symbol, if any. */
27336 if (MEM_P (XEXP (x, 0)))
27337 x = XEXP (x, 0);
27338 /* First, look for a memory access to a symbol_ref. */
27339 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27340 && SYMBOL_REF_DECL (XEXP (x, 0))
27341 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27342 ca_loc->symbol_ref = XEXP (x, 0);
27343 /* Otherwise, look at a compile-time known user-level function
27344 declaration. */
27345 else if (MEM_P (x)
27346 && MEM_EXPR (x)
27347 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27348 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27349 }
27350
27351 ca_loc->block = insn_scope (prev);
27352 if (call_arg_locations)
27353 call_arg_loc_last->next = ca_loc;
27354 else
27355 call_arg_locations = ca_loc;
27356 call_arg_loc_last = ca_loc;
27357 }
27358 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27359 {
27360 newloc->label = last_label;
27361 newloc->view = view;
27362 }
27363 else
27364 {
27365 if (!last_postcall_label)
27366 {
27367 sprintf (loclabel, "%s-1", last_label);
27368 last_postcall_label = ggc_strdup (loclabel);
27369 }
27370 newloc->label = last_postcall_label;
27371 /* ??? This view is at last_label, not last_label-1, but we
27372 could only assume view at last_label-1 is zero if we could
27373 assume calls always have length greater than one. This is
27374 probably true in general, though there might be a rare
27375 exception to this rule, e.g. if a call insn is optimized out
27376 by target magic. Then, even the -1 in the label will be
27377 wrong, which might invalidate the range. Anyway, using view,
27378 though technically possibly incorrect, will work as far as
27379 ranges go: since L-1 is in the middle of the call insn,
27380 (L-1).0 and (L-1).V shouldn't make any difference, and having
27381 the loclist entry refer to the .loc entry might be useful, so
27382 leave it like this. */
27383 newloc->view = view;
27384 }
27385
27386 if (var_loc_p && flag_debug_asm)
27387 {
27388 const char *name, *sep, *patstr;
27389 if (decl && DECL_NAME (decl))
27390 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27391 else
27392 name = "";
27393 if (NOTE_VAR_LOCATION_LOC (loc_note))
27394 {
27395 sep = " => ";
27396 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27397 }
27398 else
27399 {
27400 sep = " ";
27401 patstr = "RESET";
27402 }
27403 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27404 name, sep, patstr);
27405 }
27406
27407 last_var_location_insn = next_real;
27408 last_in_cold_section_p = in_cold_section_p;
27409 }
27410
27411 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27412 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27413 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27414 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27415 BLOCK_FRAGMENT_ORIGIN links. */
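/* Illustrative example (hypothetical nesting): for blocks nested as
   { A { B { C } } }, block_within_block_p (C, A, false) only walks
   C -> B -> A through BLOCK_SUPERCONTEXT links, while passing BOTHWAYS
   additionally verifies that A lists B among its BLOCK_SUBBLOCKS and that
   B lists C, resolving BLOCK_FRAGMENT_ORIGIN for partitioned blocks.  */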
27416 static bool
27417 block_within_block_p (tree block, tree outer, bool bothways)
27418 {
27419 if (block == outer)
27420 return true;
27421
27422 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27423 for (tree context = BLOCK_SUPERCONTEXT (block);
27424 context != outer;
27425 context = BLOCK_SUPERCONTEXT (context))
27426 if (!context || TREE_CODE (context) != BLOCK)
27427 return false;
27428
27429 if (!bothways)
27430 return true;
27431
27432 /* Now check that each block is actually referenced by its
27433 parent. */
27434 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27435 context = BLOCK_SUPERCONTEXT (context))
27436 {
27437 if (BLOCK_FRAGMENT_ORIGIN (context))
27438 {
27439 gcc_assert (!BLOCK_SUBBLOCKS (context));
27440 context = BLOCK_FRAGMENT_ORIGIN (context);
27441 }
27442 for (tree sub = BLOCK_SUBBLOCKS (context);
27443 sub != block;
27444 sub = BLOCK_CHAIN (sub))
27445 if (!sub)
27446 return false;
27447 if (context == outer)
27448 return true;
27449 else
27450 block = context;
27451 }
27452 }
27453
27454 /* Called during final while assembling the marker of the entry point
27455 for an inlined function. */
27456
27457 static void
27458 dwarf2out_inline_entry (tree block)
27459 {
27460 gcc_assert (debug_inline_points);
27461
27462 /* If we can't represent it, don't bother. */
27463 if (!(dwarf_version >= 3 || !dwarf_strict))
27464 return;
27465
27466 gcc_assert (DECL_P (block_ultimate_origin (block)));
27467
27468 /* Sanity check the block tree. This would catch a case in which
27469 BLOCK got removed from the tree reachable from the outermost
27470 lexical block, but got retained in markers. It would still link
27471 back to its parents, but some ancestor would be missing a link
27472 down the path to the sub BLOCK. If the block got removed, its
27473 BLOCK_NUMBER will not be a usable value. */
27474 if (flag_checking)
27475 gcc_assert (block_within_block_p (block,
27476 DECL_INITIAL (current_function_decl),
27477 true));
27478
27479 gcc_assert (inlined_function_outer_scope_p (block));
27480 gcc_assert (!BLOCK_DIE (block));
27481
27482 if (BLOCK_FRAGMENT_ORIGIN (block))
27483 block = BLOCK_FRAGMENT_ORIGIN (block);
27484 /* Can the entry point ever not be at the beginning of an
27485 unfragmented lexical block? */
27486 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27487 || (cur_line_info_table
27488 && !ZERO_VIEW_P (cur_line_info_table->view))))
27489 return;
27490
27491 if (!inline_entry_data_table)
27492 inline_entry_data_table
27493 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27494
27495
27496 inline_entry_data **iedp
27497 = inline_entry_data_table->find_slot_with_hash (block,
27498 htab_hash_pointer (block),
27499 INSERT);
27500 if (*iedp)
27501 /* ??? Ideally, we'd record all entry points for the same inlined
27502 function (some may have been duplicated by e.g. unrolling), but
27503 we have no way to represent that ATM. */
27504 return;
27505
27506 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27507 ied->block = block;
27508 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27509 ied->label_num = BLOCK_NUMBER (block);
27510 if (cur_line_info_table)
27511 ied->view = cur_line_info_table->view;
27512
27513 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27514
27515 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27516 BLOCK_NUMBER (block));
27517 ASM_OUTPUT_LABEL (asm_out_file, label);
27518 }
27519
27520 /* Called from finalize_size_functions for size functions so that their body
27521 can be encoded in the debug info to describe the layout of variable-length
27522 structures. */
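/* For instance (a hypothetical case), a record type whose size depends on
   a run-time bound has its compiler-generated size function translated by
   function_to_dwarf_procedure into a DW_TAG_dwarf_procedure DIE that debug
   info consumers can evaluate to obtain the object's size.  */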
27523
27524 static void
27525 dwarf2out_size_function (tree decl)
27526 {
27527 function_to_dwarf_procedure (decl);
27528 }
27529
27530 /* Note in one location list that the text section has changed. */
27531
27532 int
27533 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27534 {
27535 var_loc_list *list = *slot;
27536 if (list->first)
27537 list->last_before_switch
27538 = list->last->next ? list->last->next : list->last;
27539 return 1;
27540 }
27541
27542 /* Note in all location lists that the text section has changed. */
27543
27544 static void
27545 var_location_switch_text_section (void)
27546 {
27547 if (decl_loc_table == NULL)
27548 return;
27549
27550 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27551 }
27552
27553 /* Create a new line number table. */
27554
27555 static dw_line_info_table *
27556 new_line_info_table (void)
27557 {
27558 dw_line_info_table *table;
27559
27560 table = ggc_cleared_alloc<dw_line_info_table> ();
27561 table->file_num = 1;
27562 table->line_num = 1;
27563 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27564 FORCE_RESET_NEXT_VIEW (table->view);
27565 table->symviews_since_reset = 0;
27566
27567 return table;
27568 }
27569
27570 /* Look up the "current" table into which we emit line info, so
27571 that we don't have to do it for every source line. */
27572
27573 static void
27574 set_cur_line_info_table (section *sec)
27575 {
27576 dw_line_info_table *table;
27577
27578 if (sec == text_section)
27579 table = text_section_line_info;
27580 else if (sec == cold_text_section)
27581 {
27582 table = cold_text_section_line_info;
27583 if (!table)
27584 {
27585 cold_text_section_line_info = table = new_line_info_table ();
27586 table->end_label = cold_end_label;
27587 }
27588 }
27589 else
27590 {
27591 const char *end_label;
27592
27593 if (crtl->has_bb_partition)
27594 {
27595 if (in_cold_section_p)
27596 end_label = crtl->subsections.cold_section_end_label;
27597 else
27598 end_label = crtl->subsections.hot_section_end_label;
27599 }
27600 else
27601 {
27602 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27603 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27604 current_function_funcdef_no);
27605 end_label = ggc_strdup (label);
27606 }
27607
27608 table = new_line_info_table ();
27609 table->end_label = end_label;
27610
27611 vec_safe_push (separate_line_info, table);
27612 }
27613
27614 if (output_asm_line_debug_info ())
27615 table->is_stmt = (cur_line_info_table
27616 ? cur_line_info_table->is_stmt
27617 : DWARF_LINE_DEFAULT_IS_STMT_START);
27618 cur_line_info_table = table;
27619 }
27620
27621
27622 /* We need to reset the locations at the beginning of each
27623 function. We can't do this in the end_function hook, because the
27624 declarations that use the locations won't have been output when
27625 that hook is called. Also compute have_multiple_function_sections here. */
27626
27627 static void
27628 dwarf2out_begin_function (tree fun)
27629 {
27630 section *sec = function_section (fun);
27631
27632 if (sec != text_section)
27633 have_multiple_function_sections = true;
27634
27635 if (crtl->has_bb_partition && !cold_text_section)
27636 {
27637 gcc_assert (current_function_decl == fun);
27638 cold_text_section = unlikely_text_section ();
27639 switch_to_section (cold_text_section);
27640 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27641 switch_to_section (sec);
27642 }
27643
27644 dwarf2out_note_section_used ();
27645 call_site_count = 0;
27646 tail_call_site_count = 0;
27647
27648 set_cur_line_info_table (sec);
27649 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27650 }
27651
27652 /* Helper function of dwarf2out_end_function, called only after emitting
27653 the very first function into assembly. Check if some .debug_loc range
27654 might end with a .LVL* label that could be equal to .Ltext0.
27655 In that case we must force using absolute addresses in .debug_loc ranges,
27656 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27657 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27658 list terminator.
27659 Set have_multiple_function_sections to true in that case and
27660 terminate htab traversal. */
27661
27662 int
27663 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27664 {
27665 var_loc_list *entry = *slot;
27666 struct var_loc_node *node;
27667
27668 node = entry->first;
27669 if (node && node->next && node->next->label)
27670 {
27671 unsigned int i;
27672 const char *label = node->next->label;
27673 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27674
27675 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27676 {
27677 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27678 if (strcmp (label, loclabel) == 0)
27679 {
27680 have_multiple_function_sections = true;
27681 return 0;
27682 }
27683 }
27684 }
27685 return 1;
27686 }
27687
27688 /* Hook called after emitting a function into assembly.
27689 This does something only for the very first function emitted. */
27690
27691 static void
27692 dwarf2out_end_function (unsigned int)
27693 {
27694 if (in_first_function_p
27695 && !have_multiple_function_sections
27696 && first_loclabel_num_not_at_text_label
27697 && decl_loc_table)
27698 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27699 in_first_function_p = false;
27700 maybe_at_text_label_p = false;
27701 }
27702
27703 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27704 front-ends register a translation unit even before dwarf2out_init is
27705 called. */
27706 static tree main_translation_unit = NULL_TREE;
27707
27708 /* Hook called by front-ends after they have built their main translation
27709 unit.  Associate comp_unit_die with UNIT. */
27710
27711 static void
27712 dwarf2out_register_main_translation_unit (tree unit)
27713 {
27714 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27715 && main_translation_unit == NULL_TREE);
27716 main_translation_unit = unit;
27717 /* If dwarf2out_init has not been called yet, it will perform the association
27718 itself looking at main_translation_unit. */
27719 if (decl_die_table != NULL)
27720 equate_decl_number_to_die (unit, comp_unit_die ());
27721 }
27722
27723 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27724
27725 static void
27726 push_dw_line_info_entry (dw_line_info_table *table,
27727 enum dw_line_info_opcode opcode, unsigned int val)
27728 {
27729 dw_line_info_entry e;
27730 e.opcode = opcode;
27731 e.val = val;
27732 vec_safe_push (table->entries, e);
27733 }
27734
27735 /* Output a label to mark the beginning of a source code line entry
27736 and record information relating to this source line, in
27737 'line_info_table' for later output of the .debug_line section. */
27738 /* ??? The discriminator parameter ought to be unsigned. */
27739
27740 static void
27741 dwarf2out_source_line (unsigned int line, unsigned int column,
27742 const char *filename,
27743 int discriminator, bool is_stmt)
27744 {
27745 unsigned int file_num;
27746 dw_line_info_table *table;
27747 static var_loc_view lvugid;
27748
27749 if (debug_info_level < DINFO_LEVEL_TERSE)
27750 return;
27751
27752 table = cur_line_info_table;
27753
27754 if (line == 0)
27755 {
27756 if (debug_variable_location_views
27757 && output_asm_line_debug_info ()
27758 && table && !RESETTING_VIEW_P (table->view))
27759 {
27760 /* If we're using the assembler to compute view numbers, we
27761 can't issue a .loc directive for line zero, so we can't
27762 get a view number at this point. We might attempt to
27763 compute it from the previous view, or equate it to a
27764 subsequent view (though it might not be there!), but
27765 since we're omitting the line number entry, we might as
27766 well omit the view number as well. That means pretending
27767 it's a view number zero, which might very well turn out
27768 to be correct. ??? Extend the assembler so that the
27769 compiler could emit e.g. ".locview .LVU#", to output a
27770 view without changing line number information. We'd then
27771 have to count it in symviews_since_reset; when it's omitted,
27772 it doesn't count. */
27773 if (!zero_view_p)
27774 zero_view_p = BITMAP_GGC_ALLOC ();
27775 bitmap_set_bit (zero_view_p, table->view);
27776 if (flag_debug_asm)
27777 {
27778 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27779 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27780 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27781 ASM_COMMENT_START);
27782 assemble_name (asm_out_file, label);
27783 putc ('\n', asm_out_file);
27784 }
27785 table->view = ++lvugid;
27786 }
27787 return;
27788 }
27789
27790 /* The discriminator column was added in dwarf4. Simplify the below
27791 by simply removing it if we're not supposed to output it. */
27792 if (dwarf_version < 4 && dwarf_strict)
27793 discriminator = 0;
27794
27795 if (!debug_column_info)
27796 column = 0;
27797
27798 file_num = maybe_emit_file (lookup_filename (filename));
27799
27800 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27801 the debugger has used the second (possibly duplicate) line number
27802 at the beginning of the function to mark the end of the prologue.
27803 We could eliminate any other duplicates within the function. For
27804 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27805 that second line number entry. */
27806 /* Recall that this end-of-prologue indication is *not* the same thing
27807 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27808 to which the hook corresponds, follows the last insn that was
27809 emitted by gen_prologue. What we need is to precede the first insn
27810 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27811 insn that corresponds to something the user wrote. These may be
27812 very different locations once scheduling is enabled. */
27813
27814 if (0 && file_num == table->file_num
27815 && line == table->line_num
27816 && column == table->column_num
27817 && discriminator == table->discrim_num
27818 && is_stmt == table->is_stmt)
27819 return;
27820
27821 switch_to_section (current_function_section ());
27822
27823 /* If requested, emit something human-readable. */
27824 if (flag_debug_asm)
27825 {
27826 if (debug_column_info)
27827 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27828 filename, line, column);
27829 else
27830 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27831 filename, line);
27832 }
27833
27834 if (output_asm_line_debug_info ())
27835 {
27836 /* Emit the .loc directive understood by GNU as. */
27837 /* "\t.loc %u %u %u is_stmt %u discriminator %u",
27838    file_num, line, column, is_stmt, discriminator */
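/* For example (hypothetical values), the directive built below could read
   ".loc 1 42 7 is_stmt 0 discriminator 2 view .LVU5" for file number 1,
   line 42, column 7; the is_stmt, discriminator and view operands are
   only appended when needed.  */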
27839 fputs ("\t.loc ", asm_out_file);
27840 fprint_ul (asm_out_file, file_num);
27841 putc (' ', asm_out_file);
27842 fprint_ul (asm_out_file, line);
27843 putc (' ', asm_out_file);
27844 fprint_ul (asm_out_file, column);
27845
27846 if (is_stmt != table->is_stmt)
27847 {
27848 fputs (" is_stmt ", asm_out_file);
27849 putc (is_stmt ? '1' : '0', asm_out_file);
27850 }
27851 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27852 {
27853 gcc_assert (discriminator > 0);
27854 fputs (" discriminator ", asm_out_file);
27855 fprint_ul (asm_out_file, (unsigned long) discriminator);
27856 }
27857 if (debug_variable_location_views)
27858 {
27859 if (!RESETTING_VIEW_P (table->view))
27860 {
27861 table->symviews_since_reset++;
27862 if (table->symviews_since_reset > symview_upper_bound)
27863 symview_upper_bound = table->symviews_since_reset;
27864 /* When we're using the assembler to compute view
27865 numbers, we output symbolic labels after "view" in
27866 .loc directives, and the assembler will set them for
27867 us, so that we can refer to the view numbers in
27868 location lists. The only exceptions are when we know
27869 a view will be zero: "-0" is a forced reset, used
27870 e.g. in the beginning of functions, whereas "0" tells
27871 the assembler to check that there was a PC change
27872 since the previous view, in a way that implicitly
27873 resets the next view. */
27874 fputs (" view ", asm_out_file);
27875 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27876 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27877 assemble_name (asm_out_file, label);
27878 table->view = ++lvugid;
27879 }
27880 else
27881 {
27882 table->symviews_since_reset = 0;
27883 if (FORCE_RESETTING_VIEW_P (table->view))
27884 fputs (" view -0", asm_out_file);
27885 else
27886 fputs (" view 0", asm_out_file);
27887 /* Mark the present view as a zero view. Earlier debug
27888 binds may have already added its id to loclists to be
27889 emitted later, so we can't reuse the id for something
27890 else. However, it's good to know whether a view is
27891 known to be zero, because then we may be able to
27892 optimize out locviews that are all zeros, so take
27893 note of it in zero_view_p. */
27894 if (!zero_view_p)
27895 zero_view_p = BITMAP_GGC_ALLOC ();
27896 bitmap_set_bit (zero_view_p, lvugid);
27897 table->view = ++lvugid;
27898 }
27899 }
27900 putc ('\n', asm_out_file);
27901 }
27902 else
27903 {
27904 unsigned int label_num = ++line_info_label_num;
27905
27906 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27907
27908 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27909 push_dw_line_info_entry (table, LI_adv_address, label_num);
27910 else
27911 push_dw_line_info_entry (table, LI_set_address, label_num);
27912 if (debug_variable_location_views)
27913 {
27914 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27915 if (resetting)
27916 table->view = 0;
27917
27918 if (flag_debug_asm)
27919 fprintf (asm_out_file, "\t%s view %s%d\n",
27920 ASM_COMMENT_START,
27921 resetting ? "-" : "",
27922 table->view);
27923
27924 table->view++;
27925 }
27926 if (file_num != table->file_num)
27927 push_dw_line_info_entry (table, LI_set_file, file_num);
27928 if (discriminator != table->discrim_num)
27929 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27930 if (is_stmt != table->is_stmt)
27931 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27932 push_dw_line_info_entry (table, LI_set_line, line);
27933 if (debug_column_info)
27934 push_dw_line_info_entry (table, LI_set_column, column);
27935 }
27936
27937 table->file_num = file_num;
27938 table->line_num = line;
27939 table->column_num = column;
27940 table->discrim_num = discriminator;
27941 table->is_stmt = is_stmt;
27942 table->in_use = true;
27943 }
27944
27945 /* Record the beginning of a new source file. */
27946
27947 static void
27948 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27949 {
27950 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27951 {
27952 macinfo_entry e;
27953 e.code = DW_MACINFO_start_file;
27954 e.lineno = lineno;
27955 e.info = ggc_strdup (filename);
27956 vec_safe_push (macinfo_table, e);
27957 }
27958 }
27959
27960 /* Record the end of a source file. */
27961
27962 static void
27963 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27964 {
27965 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27966 {
27967 macinfo_entry e;
27968 e.code = DW_MACINFO_end_file;
27969 e.lineno = lineno;
27970 e.info = NULL;
27971 vec_safe_push (macinfo_table, e);
27972 }
27973 }
27974
27975 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27976 the tail part of the directive line, i.e. the part which is past the
27977 initial whitespace, #, whitespace, directive-name, whitespace part. */
27978
27979 static void
27980 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27981 const char *buffer ATTRIBUTE_UNUSED)
27982 {
27983 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27984 {
27985 macinfo_entry e;
27986 /* Insert a dummy first entry to be able to optimize the whole
27987 predefined macro block using DW_MACRO_import. */
27988 if (macinfo_table->is_empty () && lineno <= 1)
27989 {
27990 e.code = 0;
27991 e.lineno = 0;
27992 e.info = NULL;
27993 vec_safe_push (macinfo_table, e);
27994 }
27995 e.code = DW_MACINFO_define;
27996 e.lineno = lineno;
27997 e.info = ggc_strdup (buffer);
27998 vec_safe_push (macinfo_table, e);
27999 }
28000 }
28001
28002 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28003 the tail part of the directive line, i.e. the part which is past the
28004 initial whitespace, #, whitespace, directive-name, whitespace part. */
28005
28006 static void
28007 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28008 const char *buffer ATTRIBUTE_UNUSED)
28009 {
28010 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28011 {
28012 macinfo_entry e;
28013 /* Insert a dummy first entry to be able to optimize the whole
28014 predefined macro block using DW_MACRO_import. */
28015 if (macinfo_table->is_empty () && lineno <= 1)
28016 {
28017 e.code = 0;
28018 e.lineno = 0;
28019 e.info = NULL;
28020 vec_safe_push (macinfo_table, e);
28021 }
28022 e.code = DW_MACINFO_undef;
28023 e.lineno = lineno;
28024 e.info = ggc_strdup (buffer);
28025 vec_safe_push (macinfo_table, e);
28026 }
28027 }
28028
28029 /* Helpers to manipulate the hash table of macinfo entries. */
28030
28031 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28032 {
28033 static inline hashval_t hash (const macinfo_entry *);
28034 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28035 };
28036
28037 inline hashval_t
28038 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28039 {
28040 return htab_hash_string (entry->info);
28041 }
28042
28043 inline bool
28044 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28045 const macinfo_entry *entry2)
28046 {
28047 return !strcmp (entry1->info, entry2->info);
28048 }
28049
28050 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28051
28052 /* Output a single .debug_macinfo entry. */
28053
28054 static void
28055 output_macinfo_op (macinfo_entry *ref)
28056 {
28057 int file_num;
28058 size_t len;
28059 struct indirect_string_node *node;
28060 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28061 struct dwarf_file_data *fd;
28062
28063 switch (ref->code)
28064 {
28065 case DW_MACINFO_start_file:
28066 fd = lookup_filename (ref->info);
28067 file_num = maybe_emit_file (fd);
28068 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28069 dw2_asm_output_data_uleb128 (ref->lineno,
28070 "Included from line number %lu",
28071 (unsigned long) ref->lineno);
28072 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28073 break;
28074 case DW_MACINFO_end_file:
28075 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28076 break;
28077 case DW_MACINFO_define:
28078 case DW_MACINFO_undef:
28079 len = strlen (ref->info) + 1;
28080 if (!dwarf_strict
28081 && len > DWARF_OFFSET_SIZE
28082 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28083 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28084 {
28085 ref->code = ref->code == DW_MACINFO_define
28086 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28087 output_macinfo_op (ref);
28088 return;
28089 }
28090 dw2_asm_output_data (1, ref->code,
28091 ref->code == DW_MACINFO_define
28092 ? "Define macro" : "Undefine macro");
28093 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28094 (unsigned long) ref->lineno);
28095 dw2_asm_output_nstring (ref->info, -1, "The macro");
28096 break;
28097 case DW_MACRO_define_strp:
28098 case DW_MACRO_undef_strp:
28099 node = find_AT_string (ref->info);
28100 gcc_assert (node
28101 && (node->form == DW_FORM_strp
28102 || node->form == dwarf_FORM (DW_FORM_strx)));
28103 dw2_asm_output_data (1, ref->code,
28104 ref->code == DW_MACRO_define_strp
28105 ? "Define macro strp"
28106 : "Undefine macro strp");
28107 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28108 (unsigned long) ref->lineno);
28109 if (node->form == DW_FORM_strp)
28110 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28111 debug_str_section, "The macro: \"%s\"",
28112 ref->info);
28113 else
28114 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28115 ref->info);
28116 break;
28117 case DW_MACRO_import:
28118 dw2_asm_output_data (1, ref->code, "Import");
28119 ASM_GENERATE_INTERNAL_LABEL (label,
28120 DEBUG_MACRO_SECTION_LABEL,
28121 ref->lineno + macinfo_label_base);
28122 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28123 break;
28124 default:
28125 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28126 ASM_COMMENT_START, (unsigned long) ref->code);
28127 break;
28128 }
28129 }
28130
28131 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28132 other compilation units' .debug_macinfo sections.  IDX is the index of
28133 the first define/undef op; return the number of ops that should be
28134 emitted in a comdat .debug_macinfo section and emit
28135 a DW_MACRO_import entry referencing it.
28136 If the define/undef entry should be emitted normally, return 0. */
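/* Rough illustration (hypothetical names): when two compilation units
   include the same header and see an identical run of define/undef ops,
   the run is emitted once, in a comdat group named along the lines of
   "wm4.limits.h.42.<32 hex md5 digits>", and each unit's primary
   .debug_macinfo section then carries only a DW_MACRO_import of it.  */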
28137
28138 static unsigned
28139 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28140 macinfo_hash_type **macinfo_htab)
28141 {
28142 macinfo_entry *first, *second, *cur, *inc;
28143 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28144 unsigned char checksum[16];
28145 struct md5_ctx ctx;
28146 char *grp_name, *tail;
28147 const char *base;
28148 unsigned int i, count, encoded_filename_len, linebuf_len;
28149 macinfo_entry **slot;
28150
28151 first = &(*macinfo_table)[idx];
28152 second = &(*macinfo_table)[idx + 1];
28153
28154 /* Optimize only if there are at least two consecutive define/undef ops,
28155 and either all of them are before first DW_MACINFO_start_file
28156 with lineno {0,1} (i.e. predefined macro block), or all of them are
28157 in some included header file. */
28158 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28159 return 0;
28160 if (vec_safe_is_empty (files))
28161 {
28162 if (first->lineno > 1 || second->lineno > 1)
28163 return 0;
28164 }
28165 else if (first->lineno == 0)
28166 return 0;
28167
28168 /* Find the last define/undef entry that can be grouped together
28169 with first and at the same time compute md5 checksum of their
28170 codes, linenumbers and strings. */
28171 md5_init_ctx (&ctx);
28172 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28173 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28174 break;
28175 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28176 break;
28177 else
28178 {
28179 unsigned char code = cur->code;
28180 md5_process_bytes (&code, 1, &ctx);
28181 checksum_uleb128 (cur->lineno, &ctx);
28182 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28183 }
28184 md5_finish_ctx (&ctx, checksum);
28185 count = i - idx;
28186
28187 /* From the containing include filename (if any) pick up just
28188 usable characters from its basename. */
28189 if (vec_safe_is_empty (files))
28190 base = "";
28191 else
28192 base = lbasename (files->last ().info);
28193 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28194 if (ISIDNUM (base[i]) || base[i] == '.')
28195 encoded_filename_len++;
28196 /* Count . at the end. */
28197 if (encoded_filename_len)
28198 encoded_filename_len++;
28199
28200 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28201 linebuf_len = strlen (linebuf);
28202
28203 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
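/* E.g. (hypothetical): "wm4." for a 4-byte DWARF offset size, then
   "limits.h." if the run sits in that header, the starting line number,
   a '.', and 32 hex digits of the md5 checksum computed above.  */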
28204 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28205 + 16 * 2 + 1);
28206 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28207 tail = grp_name + 4;
28208 if (encoded_filename_len)
28209 {
28210 for (i = 0; base[i]; i++)
28211 if (ISIDNUM (base[i]) || base[i] == '.')
28212 *tail++ = base[i];
28213 *tail++ = '.';
28214 }
28215 memcpy (tail, linebuf, linebuf_len);
28216 tail += linebuf_len;
28217 *tail++ = '.';
28218 for (i = 0; i < 16; i++)
28219 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28220
28221 /* Construct a macinfo_entry for DW_MACRO_import
28222 in the empty vector entry before the first define/undef. */
28223 inc = &(*macinfo_table)[idx - 1];
28224 inc->code = DW_MACRO_import;
28225 inc->lineno = 0;
28226 inc->info = ggc_strdup (grp_name);
28227 if (!*macinfo_htab)
28228 *macinfo_htab = new macinfo_hash_type (10);
28229 /* Avoid emitting duplicates. */
28230 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28231 if (*slot != NULL)
28232 {
28233 inc->code = 0;
28234 inc->info = NULL;
28235 /* If such an entry has been used before, just emit
28236 a DW_MACRO_import op. */
28237 inc = *slot;
28238 output_macinfo_op (inc);
28239 /* And clear all macinfo_entry in the range to avoid emitting them
28240 in the second pass. */
28241 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28242 {
28243 cur->code = 0;
28244 cur->info = NULL;
28245 }
28246 }
28247 else
28248 {
28249 *slot = inc;
28250 inc->lineno = (*macinfo_htab)->elements ();
28251 output_macinfo_op (inc);
28252 }
28253 return count;
28254 }
28255
28256 /* Save any strings needed by the macinfo table in the debug str
28257 table. All strings must be collected into the table by the time
28258 index_string is called. */
28259
28260 static void
28261 save_macinfo_strings (void)
28262 {
28263 unsigned len;
28264 unsigned i;
28265 macinfo_entry *ref;
28266
28267 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28268 {
28269 switch (ref->code)
28270 {
28271 /* Match the logic in output_macinfo_op to decide on
28272 indirect strings. */
28273 case DW_MACINFO_define:
28274 case DW_MACINFO_undef:
28275 len = strlen (ref->info) + 1;
28276 if (!dwarf_strict
28277 && len > DWARF_OFFSET_SIZE
28278 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28279 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28280 set_indirect_string (find_AT_string (ref->info));
28281 break;
28282 case DW_MACRO_define_strp:
28283 case DW_MACRO_undef_strp:
28284 set_indirect_string (find_AT_string (ref->info));
28285 break;
28286 default:
28287 break;
28288 }
28289 }
28290 }
28291
28292 /* Output macinfo section(s). */
28293
28294 static void
28295 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28296 {
28297 unsigned i;
28298 unsigned long length = vec_safe_length (macinfo_table);
28299 macinfo_entry *ref;
28300 vec<macinfo_entry, va_gc> *files = NULL;
28301 macinfo_hash_type *macinfo_htab = NULL;
28302 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28303
28304 if (! length)
28305 return;
28306
28307 /* output_macinfo* uses these interchangeably. */
28308 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28309 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28310 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28311 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28312
28313 /* AIX Assembler inserts the length, so adjust the reference to match the
28314 offset expected by debuggers. */
28315 strcpy (dl_section_ref, debug_line_label);
28316 if (XCOFF_DEBUGGING_INFO)
28317 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28318
28319 /* For .debug_macro emit the section header. */
28320 if (!dwarf_strict || dwarf_version >= 5)
28321 {
28322 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28323 "DWARF macro version number");
28324 if (DWARF_OFFSET_SIZE == 8)
28325 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28326 else
28327 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28328 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28329 debug_line_section, NULL);
28330 }
28331
28332 /* The first loop emits the primary .debug_macinfo section; after each
28333 emitted op the corresponding macinfo_entry is cleared.
28334 If a longer range of define/undef ops can be optimized using
28335 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
28336 the vector entry before the first define/undef of the range, and the
28337 whole range of define/undef ops is kept (not emitted) for the second pass. */
28338 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28339 {
28340 switch (ref->code)
28341 {
28342 case DW_MACINFO_start_file:
28343 vec_safe_push (files, *ref);
28344 break;
28345 case DW_MACINFO_end_file:
28346 if (!vec_safe_is_empty (files))
28347 files->pop ();
28348 break;
28349 case DW_MACINFO_define:
28350 case DW_MACINFO_undef:
28351 if ((!dwarf_strict || dwarf_version >= 5)
28352 && HAVE_COMDAT_GROUP
28353 && vec_safe_length (files) != 1
28354 && i > 0
28355 && i + 1 < length
28356 && (*macinfo_table)[i - 1].code == 0)
28357 {
28358 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28359 if (count)
28360 {
28361 i += count - 1;
28362 continue;
28363 }
28364 }
28365 break;
28366 case 0:
28367 /* A dummy entry may be inserted at the beginning to be able
28368 to optimize the whole block of predefined macros. */
28369 if (i == 0)
28370 continue;
28371 default:
28372 break;
28373 }
28374 output_macinfo_op (ref);
28375 ref->info = NULL;
28376 ref->code = 0;
28377 }
28378
28379 if (!macinfo_htab)
28380 return;
28381
28382 /* Save the number of transparent includes so we can adjust the
28383 label number for the fat LTO object DWARF. */
28384 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28385
28386 delete macinfo_htab;
28387 macinfo_htab = NULL;
28388
28389 /* If any DW_MACRO_import ops were used, then at each such entry
28390 terminate the current chain, switch to a new comdat .debug_macinfo
28391 section and emit the define/undef entries within it. */
28392 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28393 switch (ref->code)
28394 {
28395 case 0:
28396 continue;
28397 case DW_MACRO_import:
28398 {
28399 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28400 tree comdat_key = get_identifier (ref->info);
28401 /* Terminate the previous .debug_macinfo section. */
28402 dw2_asm_output_data (1, 0, "End compilation unit");
28403 targetm.asm_out.named_section (debug_macinfo_section_name,
28404 SECTION_DEBUG
28405 | SECTION_LINKONCE
28406 | (early_lto_debug
28407 ? SECTION_EXCLUDE : 0),
28408 comdat_key);
28409 ASM_GENERATE_INTERNAL_LABEL (label,
28410 DEBUG_MACRO_SECTION_LABEL,
28411 ref->lineno + macinfo_label_base);
28412 ASM_OUTPUT_LABEL (asm_out_file, label);
28413 ref->code = 0;
28414 ref->info = NULL;
28415 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28416 "DWARF macro version number");
28417 if (DWARF_OFFSET_SIZE == 8)
28418 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28419 else
28420 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28421 }
28422 break;
28423 case DW_MACINFO_define:
28424 case DW_MACINFO_undef:
28425 output_macinfo_op (ref);
28426 ref->code = 0;
28427 ref->info = NULL;
28428 break;
28429 default:
28430 gcc_unreachable ();
28431 }
28432
28433 macinfo_label_base += macinfo_label_base_adj;
28434 }
28435
28436 /* Initialize the various sections and labels for dwarf output, using
28437 the early LTO debug variants when EARLY_LTO_DEBUG is set.  Returns the
28438 generation (zero-based count of times the function was called). */
28439
28440 static unsigned
28441 init_sections_and_labels (bool early_lto_debug)
28442 {
28443 /* As we may get called multiple times have a generation count for
28444 labels. */
28445 static unsigned generation = 0;
28446
28447 if (early_lto_debug)
28448 {
28449 if (!dwarf_split_debug_info)
28450 {
28451 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28452 SECTION_DEBUG | SECTION_EXCLUDE,
28453 NULL);
28454 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28455 SECTION_DEBUG | SECTION_EXCLUDE,
28456 NULL);
28457 debug_macinfo_section_name
28458 = ((dwarf_strict && dwarf_version < 5)
28459 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28460 debug_macinfo_section = get_section (debug_macinfo_section_name,
28461 SECTION_DEBUG
28462 | SECTION_EXCLUDE, NULL);
28463 }
28464 else
28465 {
28466 /* ??? Which of the following do we need early? */
28467 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28468 SECTION_DEBUG | SECTION_EXCLUDE,
28469 NULL);
28470 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28471 SECTION_DEBUG | SECTION_EXCLUDE,
28472 NULL);
28473 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28474 SECTION_DEBUG
28475 | SECTION_EXCLUDE, NULL);
28476 debug_skeleton_abbrev_section
28477 = get_section (DEBUG_LTO_ABBREV_SECTION,
28478 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28479 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28480 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28481 generation);
28482
28483 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28484 stay in the main .o, but the skeleton_line goes into the split
28485 off dwo. */
28486 debug_skeleton_line_section
28487 = get_section (DEBUG_LTO_LINE_SECTION,
28488 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28489 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28490 DEBUG_SKELETON_LINE_SECTION_LABEL,
28491 generation);
28492 debug_str_offsets_section
28493 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28494 SECTION_DEBUG | SECTION_EXCLUDE,
28495 NULL);
28496 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28497 DEBUG_SKELETON_INFO_SECTION_LABEL,
28498 generation);
28499 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28500 DEBUG_STR_DWO_SECTION_FLAGS,
28501 NULL);
28502 debug_macinfo_section_name
28503 = ((dwarf_strict && dwarf_version < 5)
28504 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28505 debug_macinfo_section = get_section (debug_macinfo_section_name,
28506 SECTION_DEBUG | SECTION_EXCLUDE,
28507 NULL);
28508 }
28509 /* For macro info and the file table we have to refer to a
28510 debug_line section. */
28511 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28512 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28513 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28514 DEBUG_LINE_SECTION_LABEL, generation);
28515
28516 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28517 DEBUG_STR_SECTION_FLAGS
28518 | SECTION_EXCLUDE, NULL);
28519 if (!dwarf_split_debug_info)
28520 debug_line_str_section
28521 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28522 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28523 }
28524 else
28525 {
28526 if (!dwarf_split_debug_info)
28527 {
28528 debug_info_section = get_section (DEBUG_INFO_SECTION,
28529 SECTION_DEBUG, NULL);
28530 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28531 SECTION_DEBUG, NULL);
28532 debug_loc_section = get_section (dwarf_version >= 5
28533 ? DEBUG_LOCLISTS_SECTION
28534 : DEBUG_LOC_SECTION,
28535 SECTION_DEBUG, NULL);
28536 debug_macinfo_section_name
28537 = ((dwarf_strict && dwarf_version < 5)
28538 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28539 debug_macinfo_section = get_section (debug_macinfo_section_name,
28540 SECTION_DEBUG, NULL);
28541 }
28542 else
28543 {
28544 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28545 SECTION_DEBUG | SECTION_EXCLUDE,
28546 NULL);
28547 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28548 SECTION_DEBUG | SECTION_EXCLUDE,
28549 NULL);
28550 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28551 SECTION_DEBUG, NULL);
28552 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28553 SECTION_DEBUG, NULL);
28554 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28555 SECTION_DEBUG, NULL);
28556 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28557 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28558 generation);
28559
28560 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28561 stay in the main .o, but the skeleton_line goes into the
28562 split off dwo. */
28563 debug_skeleton_line_section
28564 = get_section (DEBUG_DWO_LINE_SECTION,
28565 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28566 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28567 DEBUG_SKELETON_LINE_SECTION_LABEL,
28568 generation);
28569 debug_str_offsets_section
28570 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28571 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28572 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28573 DEBUG_SKELETON_INFO_SECTION_LABEL,
28574 generation);
28575 debug_loc_section = get_section (dwarf_version >= 5
28576 ? DEBUG_DWO_LOCLISTS_SECTION
28577 : DEBUG_DWO_LOC_SECTION,
28578 SECTION_DEBUG | SECTION_EXCLUDE,
28579 NULL);
28580 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28581 DEBUG_STR_DWO_SECTION_FLAGS,
28582 NULL);
28583 debug_macinfo_section_name
28584 = ((dwarf_strict && dwarf_version < 5)
28585 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28586 debug_macinfo_section = get_section (debug_macinfo_section_name,
28587 SECTION_DEBUG | SECTION_EXCLUDE,
28588 NULL);
28589 }
28590 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28591 SECTION_DEBUG, NULL);
28592 debug_line_section = get_section (DEBUG_LINE_SECTION,
28593 SECTION_DEBUG, NULL);
28594 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28595 SECTION_DEBUG, NULL);
28596 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28597 SECTION_DEBUG, NULL);
28598 debug_str_section = get_section (DEBUG_STR_SECTION,
28599 DEBUG_STR_SECTION_FLAGS, NULL);
28600 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28601 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28602 DEBUG_STR_SECTION_FLAGS, NULL);
28603
28604 debug_ranges_section = get_section (dwarf_version >= 5
28605 ? DEBUG_RNGLISTS_SECTION
28606 : DEBUG_RANGES_SECTION,
28607 SECTION_DEBUG, NULL);
28608 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28609 SECTION_DEBUG, NULL);
28610 }
28611
28612 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28613 DEBUG_ABBREV_SECTION_LABEL, generation);
28614 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28615 DEBUG_INFO_SECTION_LABEL, generation);
28616 info_section_emitted = false;
28617 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28618 DEBUG_LINE_SECTION_LABEL, generation);
28619 /* There are up to 4 unique ranges labels per generation.
28620 See also output_rnglists. */
28621 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28622 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28623 if (dwarf_version >= 5 && dwarf_split_debug_info)
28624 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28625 DEBUG_RANGES_SECTION_LABEL,
28626 1 + generation * 4);
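/* E.g. generation 0 may use ranges label indices 0 through 3 (index 1
   being the split-DWARF ranges base when DWARF 5 split debug info is in
   use), generation 1 uses 4 through 7, and so on; the exact label
   spelling is target dependent.  */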
28627 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28628 DEBUG_ADDR_SECTION_LABEL, generation);
28629 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28630 (dwarf_strict && dwarf_version < 5)
28631 ? DEBUG_MACINFO_SECTION_LABEL
28632 : DEBUG_MACRO_SECTION_LABEL, generation);
28633 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28634 generation);
28635
28636 ++generation;
28637 return generation - 1;
28638 }
28639
28640 /* Set up for Dwarf output at the start of compilation. */
28641
28642 static void
28643 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28644 {
28645 /* Allocate the file_table. */
28646 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28647
28648 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28649 /* Allocate the decl_die_table. */
28650 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28651
28652 /* Allocate the decl_loc_table. */
28653 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28654
28655 /* Allocate the cached_dw_loc_list_table. */
28656 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28657
28658 /* Allocate the initial hunk of the abbrev_die_table. */
28659 vec_alloc (abbrev_die_table, 256);
28660 /* Zero-th entry is allocated, but unused. */
28661 abbrev_die_table->quick_push (NULL);
28662
28663 /* Allocate the dwarf_proc_stack_usage_map. */
28664 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28665
28666 /* Allocate the pubtypes and pubnames vectors. */
28667 vec_alloc (pubname_table, 32);
28668 vec_alloc (pubtype_table, 32);
28669
28670 vec_alloc (incomplete_types, 64);
28671
28672 vec_alloc (used_rtx_array, 32);
28673
28674 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28675 vec_alloc (macinfo_table, 64);
28676 #endif
28677
28678 /* If front-ends already registered a main translation unit but we were not
28679 ready to perform the association, do this now. */
28680 if (main_translation_unit != NULL_TREE)
28681 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28682 }
28683
28684 /* Called before compile () starts outputting functions, variables
28685 and toplevel asms into assembly. */
28686
28687 static void
28688 dwarf2out_assembly_start (void)
28689 {
28690 if (text_section_line_info)
28691 return;
28692
28693 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28694 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28695 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28696 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28697 COLD_TEXT_SECTION_LABEL, 0);
28698 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28699
28700 switch_to_section (text_section);
28701 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28702 #endif
28703
28704 /* Make sure the line number table for .text always exists. */
28705 text_section_line_info = new_line_info_table ();
28706 text_section_line_info->end_label = text_end_label;
28707
28708 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28709 cur_line_info_table = text_section_line_info;
28710 #endif
28711
28712 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28713 && dwarf2out_do_cfi_asm ()
28714 && !dwarf2out_do_eh_frame ())
28715 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28716 }
28717
28718 /* A helper function for dwarf2out_finish called through
28719 htab_traverse. Assign a string its index. All strings must be
28720 collected into the table by the time index_string is called,
28721 because the indexing code relies on htab_traverse to traverse nodes
28722 in the same order for each run. */
28723
28724 int
28725 index_string (indirect_string_node **h, unsigned int *index)
28726 {
28727 indirect_string_node *node = *h;
28728
28729 find_string_form (node);
28730 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28731 {
28732 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28733 node->index = *index;
28734 *index += 1;
28735 }
28736 return 1;
28737 }
28738
28739 /* A helper function for output_indirect_strings called through
28740 htab_traverse. Output the offset to a string and update the
28741 current offset. */
28742
28743 int
28744 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28745 {
28746 indirect_string_node *node = *h;
28747
28748 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28749 {
28750 /* Assert that this node has been assigned an index. */
28751 gcc_assert (node->index != NO_INDEX_ASSIGNED
28752 && node->index != NOT_INDEXED);
28753 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28754 "indexed string 0x%x: %s", node->index, node->str);
28755 *offset += strlen (node->str) + 1;
28756 }
28757 return 1;
28758 }
28759
28760 /* A helper function for dwarf2out_finish called through
28761 htab_traverse. Output the indexed string. */
28762
28763 int
28764 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28765 {
28766 struct indirect_string_node *node = *h;
28767
28768 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28769 {
28770 /* Assert that the strings are output in the same order as their
28771 indexes were assigned. */
28772 gcc_assert (*cur_idx == node->index);
28773 assemble_string (node->str, strlen (node->str) + 1);
28774 *cur_idx += 1;
28775 }
28776 return 1;
28777 }
28778
28779 /* A helper function for output_indirect_strings.  Counts the number
28780 of indexed string offsets.  Must match the logic of the functions
28781 output_index_string[_offsets] above. */
28782 int
28783 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28784 {
28785 struct indirect_string_node *node = *h;
28786
28787 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28788 *last_idx += 1;
28789 return 1;
28790 }
28791
28792 /* A helper function for dwarf2out_finish called through
28793 htab_traverse. Emit one queued .debug_str string. */
28794
28795 int
28796 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28797 {
28798 struct indirect_string_node *node = *h;
28799
28800 node->form = find_string_form (node);
28801 if (node->form == form && node->refcount > 0)
28802 {
28803 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28804 assemble_string (node->str, strlen (node->str) + 1);
28805 }
28806
28807 return 1;
28808 }
28809
28810 /* Output the indexed string table. */
28811
28812 static void
28813 output_indirect_strings (void)
28814 {
28815 switch_to_section (debug_str_section);
28816 if (!dwarf_split_debug_info)
28817 debug_str_hash->traverse<enum dwarf_form,
28818 output_indirect_string> (DW_FORM_strp);
28819 else
28820 {
28821 unsigned int offset = 0;
28822 unsigned int cur_idx = 0;
28823
28824 if (skeleton_debug_str_hash)
28825 skeleton_debug_str_hash->traverse<enum dwarf_form,
28826 output_indirect_string> (DW_FORM_strp);
28827
28828 switch_to_section (debug_str_offsets_section);
28829 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28830 header. Note that we don't need to generate a label to the
28831 actual index table following the header here, because this is
28832 for the split dwarf case only.  In a .dwo file there is only
28833 one string offsets table (and one debug info section).  But
28834 if we were to start using string offset tables for the main (or
28835 skeleton) unit, then we would have to add a DW_AT_str_offsets_base
28836 pointing to the actual index after the header. Split dwarf
28837 units will never have a string offsets base attribute. When
28838 a split unit is moved into a .dwp file the string offsets can
28839 be found through the .debug_cu_index section table. */
28840 if (dwarf_version >= 5)
28841 {
28842 unsigned int last_idx = 0;
28843 unsigned long str_offsets_length;
28844
28845 debug_str_hash->traverse_noresize
28846 <unsigned int *, count_index_strings> (&last_idx);
28847 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
28848 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28849 dw2_asm_output_data (4, 0xffffffff,
28850 "Escape value for 64-bit DWARF extension");
28851 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28852 "Length of string offsets unit");
28853 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28854 dw2_asm_output_data (2, 0, "Header zero padding");
28855 }
28856 debug_str_hash->traverse_noresize
28857 <unsigned int *, output_index_string_offset> (&offset);
28858 switch_to_section (debug_str_dwo_section);
28859 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28860 (&cur_idx);
28861 }
28862 }
28863
28864 /* Callback for htab_traverse to assign an index to an entry in the
28865 table, and to write that entry to the .debug_addr section. */
28866
28867 int
28868 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28869 {
28870 addr_table_entry *entry = *slot;
28871
28872 if (entry->refcount == 0)
28873 {
28874 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28875 || entry->index == NOT_INDEXED);
28876 return 1;
28877 }
28878
28879 gcc_assert (entry->index == *cur_index);
28880 (*cur_index)++;
28881
28882 switch (entry->kind)
28883 {
28884 case ate_kind_rtx:
28885 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28886 "0x%x", entry->index);
28887 break;
28888 case ate_kind_rtx_dtprel:
28889 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28890 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28891 DWARF2_ADDR_SIZE,
28892 entry->addr.rtl);
28893 fputc ('\n', asm_out_file);
28894 break;
28895 case ate_kind_label:
28896 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28897 "0x%x", entry->index);
28898 break;
28899 default:
28900 gcc_unreachable ();
28901 }
28902 return 1;
28903 }
28904
28905 /* A helper function for dwarf2out_finish. Counts the number
28906 of indexed addresses.  Must match the logic of the function
28907 output_addr_table_entry above. */
28908 int
28909 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28910 {
28911 addr_table_entry *entry = *slot;
28912
28913 if (entry->refcount > 0)
28914 *last_idx += 1;
28915 return 1;
28916 }
28917
28918 /* Produce the .debug_addr section. */
28919
28920 static void
28921 output_addr_table (void)
28922 {
28923 unsigned int index = 0;
28924 if (addr_index_table == NULL || addr_index_table->size () == 0)
28925 return;
28926
28927 switch_to_section (debug_addr_section);
28928 addr_index_table
28929 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28930 }
28931
28932 #if ENABLE_ASSERT_CHECKING
28933 /* Verify that all marks are clear. */
28934
28935 static void
28936 verify_marks_clear (dw_die_ref die)
28937 {
28938 dw_die_ref c;
28939
28940 gcc_assert (! die->die_mark);
28941 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28942 }
28943 #endif /* ENABLE_ASSERT_CHECKING */
28944
28945 /* Clear the marks for a die and its children.
28946 Be cool if the mark isn't set. */
28947
28948 static void
28949 prune_unmark_dies (dw_die_ref die)
28950 {
28951 dw_die_ref c;
28952
28953 if (die->die_mark)
28954 die->die_mark = 0;
28955 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28956 }
28957
28958 /* Given LOC that is referenced by a DIE we're marking as used, find all
28959 DWARF procedures it references and mark them as used. */
28960
28961 static void
28962 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28963 {
28964 for (; loc != NULL; loc = loc->dw_loc_next)
28965 switch (loc->dw_loc_opc)
28966 {
28967 case DW_OP_implicit_pointer:
28968 case DW_OP_convert:
28969 case DW_OP_reinterpret:
28970 case DW_OP_GNU_implicit_pointer:
28971 case DW_OP_GNU_convert:
28972 case DW_OP_GNU_reinterpret:
28973 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28974 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28975 break;
28976 case DW_OP_GNU_variable_value:
28977 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28978 {
28979 dw_die_ref ref
28980 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28981 if (ref == NULL)
28982 break;
28983 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28984 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28985 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28986 }
28987 /* FALLTHRU */
28988 case DW_OP_call2:
28989 case DW_OP_call4:
28990 case DW_OP_call_ref:
28991 case DW_OP_const_type:
28992 case DW_OP_GNU_const_type:
28993 case DW_OP_GNU_parameter_ref:
28994 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
28995 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28996 break;
28997 case DW_OP_regval_type:
28998 case DW_OP_deref_type:
28999 case DW_OP_GNU_regval_type:
29000 case DW_OP_GNU_deref_type:
29001 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29002 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29003 break;
29004 case DW_OP_entry_value:
29005 case DW_OP_GNU_entry_value:
29006 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29007 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29008 break;
29009 default:
29010 break;
29011 }
29012 }
29013
29014 /* Given DIE that we're marking as used, find any other dies
29015 it references as attributes and mark them as used. */
29016
29017 static void
29018 prune_unused_types_walk_attribs (dw_die_ref die)
29019 {
29020 dw_attr_node *a;
29021 unsigned ix;
29022
29023 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29024 {
29025 switch (AT_class (a))
29026 {
29027 /* Make sure DWARF procedures referenced by location descriptions will
29028 get emitted. */
29029 case dw_val_class_loc:
29030 prune_unused_types_walk_loc_descr (AT_loc (a));
29031 break;
29032 case dw_val_class_loc_list:
29033 for (dw_loc_list_ref list = AT_loc_list (a);
29034 list != NULL;
29035 list = list->dw_loc_next)
29036 prune_unused_types_walk_loc_descr (list->expr);
29037 break;
29038
29039 case dw_val_class_view_list:
29040 /* This points to a loc_list in another attribute, so it's
29041 already covered. */
29042 break;
29043
29044 case dw_val_class_die_ref:
29045 /* A reference to another DIE.
29046 Make sure that it will get emitted.
29047 If it was broken out into a comdat group, don't follow it. */
29048 if (! AT_ref (a)->comdat_type_p
29049 || a->dw_attr == DW_AT_specification)
29050 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29051 break;
29052
29053 case dw_val_class_str:
29054 /* Set the string's refcount to 0 so that prune_unused_types_mark
29055 accounts properly for it. */
29056 a->dw_attr_val.v.val_str->refcount = 0;
29057 break;
29058
29059 default:
29060 break;
29061 }
29062 }
29063 }
29064
29065 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
29066
29067 static void
29068 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29069 {
29070 dw_die_ref c;
29071
29072 if (die == NULL || die->die_child == NULL)
29073 return;
29074 c = die->die_child;
29075 do
29076 {
29077 if (is_template_parameter (c))
29078 prune_unused_types_mark (c, 1);
29079 c = c->die_sib;
29080 } while (c && c != die->die_child);
29081 }
29082
29083 /* Mark DIE as being used. If DOKIDS is true, then walk down
29084 to DIE's children. */
29085
29086 static void
29087 prune_unused_types_mark (dw_die_ref die, int dokids)
29088 {
29089 dw_die_ref c;
29090
29091 if (die->die_mark == 0)
29092 {
29093 /* We haven't done this node yet. Mark it as used. */
29094 die->die_mark = 1;
29095 /* If this is the DIE of a generic type instantiation,
29096 mark the children DIEs that describe its generic parms and
29097 args. */
29098 prune_unused_types_mark_generic_parms_dies (die);
29099
29100 /* We also have to mark its parents as used.
29101 (But we don't want to mark our parent's kids due to this,
29102 unless it is a class.) */
29103 if (die->die_parent)
29104 prune_unused_types_mark (die->die_parent,
29105 class_scope_p (die->die_parent));
29106
29107 /* Mark any referenced nodes. */
29108 prune_unused_types_walk_attribs (die);
29109
29110 /* If this node is a specification,
29111 also mark the definition, if it exists. */
29112 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29113 prune_unused_types_mark (die->die_definition, 1);
29114 }
29115
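  /* die_mark is now nonzero: 1 means the DIE itself is marked as used,
     2 means its children have been walked as well.  */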
29116 if (dokids && die->die_mark != 2)
29117 {
29118 /* We need to walk the children, but haven't done so yet.
29119 Remember that we've walked the kids. */
29120 die->die_mark = 2;
29121
29122 /* If this is an array type, we need to make sure our
29123 kids get marked, even if they're types. If we're
29124 breaking out types into comdat sections, do this
29125 for all type definitions. */
29126 if (die->die_tag == DW_TAG_array_type
29127 || (use_debug_types
29128 && is_type_die (die) && ! is_declaration_die (die)))
29129 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29130 else
29131 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29132 }
29133 }
29134
29135 /* For local classes, check whether any static member functions were
29136 emitted and, if so, mark them. */
29137
29138 static void
29139 prune_unused_types_walk_local_classes (dw_die_ref die)
29140 {
29141 dw_die_ref c;
29142
29143 if (die->die_mark == 2)
29144 return;
29145
29146 switch (die->die_tag)
29147 {
29148 case DW_TAG_structure_type:
29149 case DW_TAG_union_type:
29150 case DW_TAG_class_type:
29151 break;
29152
29153 case DW_TAG_subprogram:
29154 if (!get_AT_flag (die, DW_AT_declaration)
29155 || die->die_definition != NULL)
29156 prune_unused_types_mark (die, 1);
29157 return;
29158
29159 default:
29160 return;
29161 }
29162
29163 /* Mark children. */
29164 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29165 }
29166
29167 /* Walk the tree DIE and mark types that we actually use. */
29168
29169 static void
29170 prune_unused_types_walk (dw_die_ref die)
29171 {
29172 dw_die_ref c;
29173
29174 /* Don't do anything if this node is already marked and
29175 children have been marked as well. */
29176 if (die->die_mark == 2)
29177 return;
29178
29179 switch (die->die_tag)
29180 {
29181 case DW_TAG_structure_type:
29182 case DW_TAG_union_type:
29183 case DW_TAG_class_type:
29184 if (die->die_perennial_p)
29185 break;
29186
29187 for (c = die->die_parent; c; c = c->die_parent)
29188 if (c->die_tag == DW_TAG_subprogram)
29189 break;
29190
29191 /* Finding used static member functions inside of classes
29192 is needed just for local classes, because for other classes
29193 static member function DIEs with DW_AT_specification
29194 are emitted outside of the DW_TAG_*_type. If we ever change
29195 it, we'd need to call this even for non-local classes. */
29196 if (c)
29197 prune_unused_types_walk_local_classes (die);
29198
29199 /* It's a type node --- don't mark it. */
29200 return;
29201
29202 case DW_TAG_const_type:
29203 case DW_TAG_packed_type:
29204 case DW_TAG_pointer_type:
29205 case DW_TAG_reference_type:
29206 case DW_TAG_rvalue_reference_type:
29207 case DW_TAG_volatile_type:
29208 case DW_TAG_typedef:
29209 case DW_TAG_array_type:
29210 case DW_TAG_interface_type:
29211 case DW_TAG_friend:
29212 case DW_TAG_enumeration_type:
29213 case DW_TAG_subroutine_type:
29214 case DW_TAG_string_type:
29215 case DW_TAG_set_type:
29216 case DW_TAG_subrange_type:
29217 case DW_TAG_ptr_to_member_type:
29218 case DW_TAG_file_type:
29219 /* Type nodes are useful only when other DIEs reference them --- don't
29220 mark them. */
29221 /* FALLTHROUGH */
29222
29223 case DW_TAG_dwarf_procedure:
29224 /* Likewise for DWARF procedures. */
29225
29226 if (die->die_perennial_p)
29227 break;
29228
29229 return;
29230
29231 default:
29232 /* Mark everything else. */
29233 break;
29234 }
29235
29236 if (die->die_mark == 0)
29237 {
29238 die->die_mark = 1;
29239
29240 /* Now, mark any dies referenced from here. */
29241 prune_unused_types_walk_attribs (die);
29242 }
29243
29244 die->die_mark = 2;
29245
29246 /* Mark children. */
29247 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29248 }
29249
29250 /* Increment the string counts on strings referred to from DIE's
29251 attributes. */
29252
29253 static void
29254 prune_unused_types_update_strings (dw_die_ref die)
29255 {
29256 dw_attr_node *a;
29257 unsigned ix;
29258
29259 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29260 if (AT_class (a) == dw_val_class_str)
29261 {
29262 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29263 s->refcount++;
29264 /* Avoid unnecessarily putting strings that are used less than
29265 twice in the hash table. */
29266 if (s->refcount
29267 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29268 {
29269 indirect_string_node **slot
29270 = debug_str_hash->find_slot_with_hash (s->str,
29271 htab_hash_string (s->str),
29272 INSERT);
29273 gcc_assert (*slot == NULL);
29274 *slot = s;
29275 }
29276 }
29277 }
29278
29279 /* Mark DIE and its children as removed. */
29280
29281 static void
29282 mark_removed (dw_die_ref die)
29283 {
29284 dw_die_ref c;
29285 die->removed = true;
29286 FOR_EACH_CHILD (die, c, mark_removed (c));
29287 }
29288
29289 /* Remove from the tree DIE any dies that aren't marked. */
29290
29291 static void
29292 prune_unused_types_prune (dw_die_ref die)
29293 {
29294 dw_die_ref c;
29295
29296 gcc_assert (die->die_mark);
29297 prune_unused_types_update_strings (die);
29298
29299 if (! die->die_child)
29300 return;
29301
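  /* The children form a circular singly-linked list through die_sib, with
     die->die_child pointing at the last child.  Walk it, unlinking runs of
     unmarked children (flagging them via mark_removed) and recursing into
     the marked ones.  */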
29302 c = die->die_child;
29303 do {
29304 dw_die_ref prev = c, next;
29305 for (c = c->die_sib; ! c->die_mark; c = next)
29306 if (c == die->die_child)
29307 {
29308 /* No marked children between 'prev' and the end of the list. */
29309 if (prev == c)
29310 /* No marked children at all. */
29311 die->die_child = NULL;
29312 else
29313 {
29314 prev->die_sib = c->die_sib;
29315 die->die_child = prev;
29316 }
29317 c->die_sib = NULL;
29318 mark_removed (c);
29319 return;
29320 }
29321 else
29322 {
29323 next = c->die_sib;
29324 c->die_sib = NULL;
29325 mark_removed (c);
29326 }
29327
29328 if (c != prev->die_sib)
29329 prev->die_sib = c;
29330 prune_unused_types_prune (c);
29331 } while (c != die->die_child);
29332 }
29333
29334 /* Remove dies representing declarations that we never use. */
29335
29336 static void
29337 prune_unused_types (void)
29338 {
29339 unsigned int i;
29340 limbo_die_node *node;
29341 comdat_type_node *ctnode;
29342 pubname_entry *pub;
29343 dw_die_ref base_type;
29344
29345 #if ENABLE_ASSERT_CHECKING
29346 /* All the marks should already be clear. */
29347 verify_marks_clear (comp_unit_die ());
29348 for (node = limbo_die_list; node; node = node->next)
29349 verify_marks_clear (node->die);
29350 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29351 verify_marks_clear (ctnode->root_die);
29352 #endif /* ENABLE_ASSERT_CHECKING */
29353
29354 /* Mark types that are used in global variables. */
29355 premark_types_used_by_global_vars ();
29356
29357 /* Set the mark on nodes that are actually used. */
29358 prune_unused_types_walk (comp_unit_die ());
29359 for (node = limbo_die_list; node; node = node->next)
29360 prune_unused_types_walk (node->die);
29361 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29362 {
29363 prune_unused_types_walk (ctnode->root_die);
29364 prune_unused_types_mark (ctnode->type_die, 1);
29365 }
29366
29367 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29368 are unusual in that they are pubnames that are the children of pubtypes.
29369 They should only be marked via their parent DW_TAG_enumeration_type die,
29370 not as roots in themselves. */
29371 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29372 if (pub->die->die_tag != DW_TAG_enumerator)
29373 prune_unused_types_mark (pub->die, 1);
29374 for (i = 0; base_types.iterate (i, &base_type); i++)
29375 prune_unused_types_mark (base_type, 1);
29376
29377 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29378 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29379 callees). */
29380 cgraph_node *cnode;
29381 FOR_EACH_FUNCTION (cnode)
29382 if (cnode->referred_to_p (false))
29383 {
29384 dw_die_ref die = lookup_decl_die (cnode->decl);
29385 if (die == NULL || die->die_mark)
29386 continue;
29387 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29388 if (e->caller != cnode
29389 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29390 {
29391 prune_unused_types_mark (die, 1);
29392 break;
29393 }
29394 }
29395
29396 if (debug_str_hash)
29397 debug_str_hash->empty ();
29398 if (skeleton_debug_str_hash)
29399 skeleton_debug_str_hash->empty ();
29400 prune_unused_types_prune (comp_unit_die ());
29401 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29402 {
29403 node = *pnode;
29404 if (!node->die->die_mark)
29405 *pnode = node->next;
29406 else
29407 {
29408 prune_unused_types_prune (node->die);
29409 pnode = &node->next;
29410 }
29411 }
29412 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29413 prune_unused_types_prune (ctnode->root_die);
29414
29415 /* Leave the marks clear. */
29416 prune_unmark_dies (comp_unit_die ());
29417 for (node = limbo_die_list; node; node = node->next)
29418 prune_unmark_dies (node->die);
29419 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29420 prune_unmark_dies (ctnode->root_die);
29421 }
29422
29423 /* Helpers to manipulate hash table of comdat type units. */
29424
29425 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29426 {
29427 static inline hashval_t hash (const comdat_type_node *);
29428 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29429 };
29430
29431 inline hashval_t
29432 comdat_type_hasher::hash (const comdat_type_node *type_node)
29433 {
29434 hashval_t h;
29435 memcpy (&h, type_node->signature, sizeof (h));
29436 return h;
29437 }
29438
29439 inline bool
29440 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29441 const comdat_type_node *type_node_2)
29442 {
29443 return (! memcmp (type_node_1->signature, type_node_2->signature,
29444 DWARF_TYPE_SIGNATURE_SIZE));
29445 }
29446
29447 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29448 to the location where it would have been added, had we known its
29449 DECL_ASSEMBLER_NAME when we added the other attributes. This will
29450 probably improve the compactness of debug info by removing equivalent
29451 abbrevs, and hide any differences caused by deferring the
29452 computation of the assembler name, triggered by e.g. PCH. */
29453
29454 static inline void
29455 move_linkage_attr (dw_die_ref die)
29456 {
29457 unsigned ix = vec_safe_length (die->die_attr);
29458 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29459
29460 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29461 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29462
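  /* Scan backwards for the spot just after DW_AT_decl_line/column/name,
     which is where the linkage name would have been added had the
     assembler name been known earlier.  */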
29463 while (--ix > 0)
29464 {
29465 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29466
29467 if (prev->dw_attr == DW_AT_decl_line
29468 || prev->dw_attr == DW_AT_decl_column
29469 || prev->dw_attr == DW_AT_name)
29470 break;
29471 }
29472
29473 if (ix != vec_safe_length (die->die_attr) - 1)
29474 {
29475 die->die_attr->pop ();
29476 die->die_attr->quick_insert (ix, linkage);
29477 }
29478 }
29479
29480 /* Helper function for resolve_addr: mark DW_TAG_base_type nodes
29481 referenced from typed stack ops and count how often they are used. */
29482
29483 static void
29484 mark_base_types (dw_loc_descr_ref loc)
29485 {
29486 dw_die_ref base_type = NULL;
29487
29488 for (; loc; loc = loc->dw_loc_next)
29489 {
29490 switch (loc->dw_loc_opc)
29491 {
29492 case DW_OP_regval_type:
29493 case DW_OP_deref_type:
29494 case DW_OP_GNU_regval_type:
29495 case DW_OP_GNU_deref_type:
29496 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29497 break;
29498 case DW_OP_convert:
29499 case DW_OP_reinterpret:
29500 case DW_OP_GNU_convert:
29501 case DW_OP_GNU_reinterpret:
29502 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29503 continue;
29504 /* FALLTHRU */
29505 case DW_OP_const_type:
29506 case DW_OP_GNU_const_type:
29507 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29508 break;
29509 case DW_OP_entry_value:
29510 case DW_OP_GNU_entry_value:
29511 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29512 continue;
29513 default:
29514 continue;
29515 }
29516 gcc_assert (base_type->die_parent == comp_unit_die ());
29517 if (base_type->die_mark)
29518 base_type->die_mark++;
29519 else
29520 {
29521 base_types.safe_push (base_type);
29522 base_type->die_mark = 1;
29523 }
29524 }
29525 }
29526
29527 /* Comparison function for sorting marked base types. */
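/* Entries with a higher usage count (die_mark) sort first; ties are broken
   by larger byte size, then larger encoding value, then larger alignment.  */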
29528
29529 static int
29530 base_type_cmp (const void *x, const void *y)
29531 {
29532 dw_die_ref dx = *(const dw_die_ref *) x;
29533 dw_die_ref dy = *(const dw_die_ref *) y;
29534 unsigned int byte_size1, byte_size2;
29535 unsigned int encoding1, encoding2;
29536 unsigned int align1, align2;
29537 if (dx->die_mark > dy->die_mark)
29538 return -1;
29539 if (dx->die_mark < dy->die_mark)
29540 return 1;
29541 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29542 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29543 if (byte_size1 < byte_size2)
29544 return 1;
29545 if (byte_size1 > byte_size2)
29546 return -1;
29547 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29548 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29549 if (encoding1 < encoding2)
29550 return 1;
29551 if (encoding1 > encoding2)
29552 return -1;
29553 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29554 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29555 if (align1 < align2)
29556 return 1;
29557 if (align1 > align2)
29558 return -1;
29559 return 0;
29560 }
29561
29562 /* Move base types marked by mark_base_types as early as possible
29563 in the CU, sorted by decreasing usage count both to make the
29564 uleb128 references as small as possible and to make sure they
29565 will have die_offset already computed by calc_die_sizes when
29566 sizes of typed stack loc ops are computed. */
29567
29568 static void
29569 move_marked_base_types (void)
29570 {
29571 unsigned int i;
29572 dw_die_ref base_type, die, c;
29573
29574 if (base_types.is_empty ())
29575 return;
29576
29577 /* Sort by decreasing usage count, they will be added again in that
29578 order later on. */
29579 base_types.qsort (base_type_cmp);
29580 die = comp_unit_die ();
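  /* Unlink the marked base type DIEs from the CU's child list, then splice
     them back in right after the last child (i.e. as the first children in
     iteration order), in the sorted order established above.  */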
29581 c = die->die_child;
29582 do
29583 {
29584 dw_die_ref prev = c;
29585 c = c->die_sib;
29586 while (c->die_mark)
29587 {
29588 remove_child_with_prev (c, prev);
29589 /* As base types got marked, there must be at least
29590 one node other than DW_TAG_base_type. */
29591 gcc_assert (die->die_child != NULL);
29592 c = prev->die_sib;
29593 }
29594 }
29595 while (c != die->die_child);
29596 gcc_assert (die->die_child);
29597 c = die->die_child;
29598 for (i = 0; base_types.iterate (i, &base_type); i++)
29599 {
29600 base_type->die_mark = 0;
29601 base_type->die_sib = c->die_sib;
29602 c->die_sib = base_type;
29603 c = base_type;
29604 }
29605 }
29606
29607 /* Helper function for resolve_addr: attempt to resolve
29608 one CONST_STRING and return true if successful. Similarly, verify that
29609 SYMBOL_REFs refer to variables emitted in the current CU. */
29610
29611 static bool
29612 resolve_one_addr (rtx *addr)
29613 {
29614 rtx rtl = *addr;
29615
29616 if (GET_CODE (rtl) == CONST_STRING)
29617 {
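      /* Rebuild the STRING_CST and check whether its constant pool entry has
	 actually been emitted; if so, use that SYMBOL_REF instead.  */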
29618 size_t len = strlen (XSTR (rtl, 0)) + 1;
29619 tree t = build_string (len, XSTR (rtl, 0));
29620 tree tlen = size_int (len - 1);
29621 TREE_TYPE (t)
29622 = build_array_type (char_type_node, build_index_type (tlen));
29623 rtl = lookup_constant_def (t);
29624 if (!rtl || !MEM_P (rtl))
29625 return false;
29626 rtl = XEXP (rtl, 0);
29627 if (GET_CODE (rtl) == SYMBOL_REF
29628 && SYMBOL_REF_DECL (rtl)
29629 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29630 return false;
29631 vec_safe_push (used_rtx_array, rtl);
29632 *addr = rtl;
29633 return true;
29634 }
29635
29636 if (GET_CODE (rtl) == SYMBOL_REF
29637 && SYMBOL_REF_DECL (rtl))
29638 {
29639 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29640 {
29641 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29642 return false;
29643 }
29644 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29645 return false;
29646 }
29647
29648 if (GET_CODE (rtl) == CONST)
29649 {
29650 subrtx_ptr_iterator::array_type array;
29651 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29652 if (!resolve_one_addr (*iter))
29653 return false;
29654 }
29655
29656 return true;
29657 }
29658
29659 /* For a STRING_CST, return the SYMBOL_REF of its constant pool entry,
29660 if possible, and create a DW_TAG_dwarf_procedure that can be referenced
29661 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29662
29663 static rtx
29664 string_cst_pool_decl (tree t)
29665 {
29666 rtx rtl = output_constant_def (t, 1);
29667 unsigned char *array;
29668 dw_loc_descr_ref l;
29669 tree decl;
29670 size_t len;
29671 dw_die_ref ref;
29672
29673 if (!rtl || !MEM_P (rtl))
29674 return NULL_RTX;
29675 rtl = XEXP (rtl, 0);
29676 if (GET_CODE (rtl) != SYMBOL_REF
29677 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29678 return NULL_RTX;
29679
29680 decl = SYMBOL_REF_DECL (rtl);
29681 if (!lookup_decl_die (decl))
29682 {
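      /* No DIE for the pool decl yet: create a DW_TAG_dwarf_procedure whose
	 DW_AT_location is a DW_OP_implicit_value holding the string bytes,
	 so DW_OP_implicit_pointer can refer to it.  */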
29683 len = TREE_STRING_LENGTH (t);
29684 vec_safe_push (used_rtx_array, rtl);
29685 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29686 array = ggc_vec_alloc<unsigned char> (len);
29687 memcpy (array, TREE_STRING_POINTER (t), len);
29688 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29689 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29690 l->dw_loc_oprnd2.v.val_vec.length = len;
29691 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29692 l->dw_loc_oprnd2.v.val_vec.array = array;
29693 add_AT_loc (ref, DW_AT_location, l);
29694 equate_decl_number_to_die (decl, ref);
29695 }
29696 return rtl;
29697 }
29698
29699 /* Helper function of resolve_addr_in_expr. LOC is
29700 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29701 of the exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29702 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29703 with DW_OP_implicit_pointer if possible and return true;
29704 if unsuccessful, return false. */
29705
29706 static bool
29707 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29708 {
29709 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29710 HOST_WIDE_INT offset = 0;
29711 dw_die_ref ref = NULL;
29712 tree decl;
29713
29714 if (GET_CODE (rtl) == CONST
29715 && GET_CODE (XEXP (rtl, 0)) == PLUS
29716 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29717 {
29718 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29719 rtl = XEXP (XEXP (rtl, 0), 0);
29720 }
29721 if (GET_CODE (rtl) == CONST_STRING)
29722 {
29723 size_t len = strlen (XSTR (rtl, 0)) + 1;
29724 tree t = build_string (len, XSTR (rtl, 0));
29725 tree tlen = size_int (len - 1);
29726
29727 TREE_TYPE (t)
29728 = build_array_type (char_type_node, build_index_type (tlen));
29729 rtl = string_cst_pool_decl (t);
29730 if (!rtl)
29731 return false;
29732 }
29733 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29734 {
29735 decl = SYMBOL_REF_DECL (rtl);
29736 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29737 {
29738 ref = lookup_decl_die (decl);
29739 if (ref && (get_AT (ref, DW_AT_location)
29740 || get_AT (ref, DW_AT_const_value)))
29741 {
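		  /* Rewrite the DW_OP_addr in place into DW_OP_implicit_pointer
		     referencing REF with byte offset OFFSET, and drop the
		     following DW_OP_stack_value.  */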
29742 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29743 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29744 loc->dw_loc_oprnd1.val_entry = NULL;
29745 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29746 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29747 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29748 loc->dw_loc_oprnd2.v.val_int = offset;
29749 return true;
29750 }
29751 }
29752 }
29753 return false;
29754 }
29755
29756 /* Helper function for resolve_addr: handle one location
29757 expression and return false if at least one CONST_STRING or SYMBOL_REF in
29758 the location list couldn't be resolved. */
29759
29760 static bool
29761 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29762 {
29763 dw_loc_descr_ref keep = NULL;
29764 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29765 switch (loc->dw_loc_opc)
29766 {
29767 case DW_OP_addr:
29768 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29769 {
29770 if ((prev == NULL
29771 || prev->dw_loc_opc == DW_OP_piece
29772 || prev->dw_loc_opc == DW_OP_bit_piece)
29773 && loc->dw_loc_next
29774 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29775 && (!dwarf_strict || dwarf_version >= 5)
29776 && optimize_one_addr_into_implicit_ptr (loc))
29777 break;
29778 return false;
29779 }
29780 break;
29781 case DW_OP_GNU_addr_index:
29782 case DW_OP_addrx:
29783 case DW_OP_GNU_const_index:
29784 case DW_OP_constx:
29785 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29786 || loc->dw_loc_opc == DW_OP_addrx)
29787 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29788 || loc->dw_loc_opc == DW_OP_constx)
29789 && loc->dtprel))
29790 {
29791 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29792 if (!resolve_one_addr (&rtl))
29793 return false;
29794 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29795 loc->dw_loc_oprnd1.val_entry
29796 = add_addr_table_entry (rtl, ate_kind_rtx);
29797 }
29798 break;
29799 case DW_OP_const4u:
29800 case DW_OP_const8u:
29801 if (loc->dtprel
29802 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29803 return false;
29804 break;
29805 case DW_OP_plus_uconst:
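	/* If pushing the constant separately and using DW_OP_plus would be
	   shorter than DW_OP_plus_uconst, rewrite it that way.  */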
29806 if (size_of_loc_descr (loc)
29807 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29808 + 1
29809 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29810 {
29811 dw_loc_descr_ref repl
29812 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29813 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29814 add_loc_descr (&repl, loc->dw_loc_next);
29815 *loc = *repl;
29816 }
29817 break;
29818 case DW_OP_implicit_value:
29819 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29820 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29821 return false;
29822 break;
29823 case DW_OP_implicit_pointer:
29824 case DW_OP_GNU_implicit_pointer:
29825 case DW_OP_GNU_parameter_ref:
29826 case DW_OP_GNU_variable_value:
29827 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29828 {
29829 dw_die_ref ref
29830 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29831 if (ref == NULL)
29832 return false;
29833 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29834 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29835 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29836 }
29837 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29838 {
29839 if (prev == NULL
29840 && loc->dw_loc_next == NULL
29841 && AT_class (a) == dw_val_class_loc)
29842 switch (a->dw_attr)
29843 {
29844 /* The following attributes allow both exprloc and reference forms,
29845 so if the whole expression is DW_OP_GNU_variable_value
29846 alone we can transform it into a reference. */
29847 case DW_AT_byte_size:
29848 case DW_AT_bit_size:
29849 case DW_AT_lower_bound:
29850 case DW_AT_upper_bound:
29851 case DW_AT_bit_stride:
29852 case DW_AT_count:
29853 case DW_AT_allocated:
29854 case DW_AT_associated:
29855 case DW_AT_byte_stride:
29856 a->dw_attr_val.val_class = dw_val_class_die_ref;
29857 a->dw_attr_val.val_entry = NULL;
29858 a->dw_attr_val.v.val_die_ref.die
29859 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29860 a->dw_attr_val.v.val_die_ref.external = 0;
29861 return true;
29862 default:
29863 break;
29864 }
29865 if (dwarf_strict)
29866 return false;
29867 }
29868 break;
29869 case DW_OP_const_type:
29870 case DW_OP_regval_type:
29871 case DW_OP_deref_type:
29872 case DW_OP_convert:
29873 case DW_OP_reinterpret:
29874 case DW_OP_GNU_const_type:
29875 case DW_OP_GNU_regval_type:
29876 case DW_OP_GNU_deref_type:
29877 case DW_OP_GNU_convert:
29878 case DW_OP_GNU_reinterpret:
29879 while (loc->dw_loc_next
29880 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29881 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29882 {
29883 dw_die_ref base1, base2;
29884 unsigned enc1, enc2, size1, size2;
29885 if (loc->dw_loc_opc == DW_OP_regval_type
29886 || loc->dw_loc_opc == DW_OP_deref_type
29887 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29888 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29889 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29890 else if (loc->dw_loc_oprnd1.val_class
29891 == dw_val_class_unsigned_const)
29892 break;
29893 else
29894 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29895 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29896 == dw_val_class_unsigned_const)
29897 break;
29898 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29899 gcc_assert (base1->die_tag == DW_TAG_base_type
29900 && base2->die_tag == DW_TAG_base_type);
29901 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29902 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29903 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29904 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29905 if (size1 == size2
29906 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29907 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29908 && loc != keep)
29909 || enc1 == enc2))
29910 {
29911 /* Optimize away next DW_OP_convert after
29912 adjusting LOC's base type die reference. */
29913 if (loc->dw_loc_opc == DW_OP_regval_type
29914 || loc->dw_loc_opc == DW_OP_deref_type
29915 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29916 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29917 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29918 else
29919 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29920 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29921 continue;
29922 }
29923 /* Don't change integer DW_OP_convert after e.g. floating
29924 point typed stack entry. */
29925 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29926 keep = loc->dw_loc_next;
29927 break;
29928 }
29929 break;
29930 default:
29931 break;
29932 }
29933 return true;
29934 }
29935
29936 /* Helper function of resolve_addr. DIE had a DW_AT_location of
29937 DW_OP_addr alone, whose operand referred to DECL, and the
29938 DW_OP_addr couldn't be resolved. resolve_addr has already
29939 removed the DW_AT_location attribute. This function attempts to
29940 add to DIE a new DW_AT_location attribute with DW_OP_implicit_pointer,
29941 or a DW_AT_const_value attribute, if possible. */
29942
29943 static void
29944 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29945 {
29946 if (!VAR_P (decl)
29947 || lookup_decl_die (decl) != die
29948 || DECL_EXTERNAL (decl)
29949 || !TREE_STATIC (decl)
29950 || DECL_INITIAL (decl) == NULL_TREE
29951 || DECL_P (DECL_INITIAL (decl))
29952 || get_AT (die, DW_AT_const_value))
29953 return;
29954
29955 tree init = DECL_INITIAL (decl);
29956 HOST_WIDE_INT offset = 0;
29957 /* For variables that have been optimized away and thus
29958 don't have a memory location, see if we can emit
29959 DW_AT_const_value instead. */
29960 if (tree_add_const_value_attribute (die, init))
29961 return;
29962 if (dwarf_strict && dwarf_version < 5)
29963 return;
29964 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29965 and ADDR_EXPR refers to a decl that has DW_AT_location or
29966 DW_AT_const_value (but isn't addressable, otherwise
29967 resolving the original DW_OP_addr wouldn't fail), see if
29968 we can add DW_OP_implicit_pointer. */
29969 STRIP_NOPS (init);
29970 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29971 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29972 {
29973 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29974 init = TREE_OPERAND (init, 0);
29975 STRIP_NOPS (init);
29976 }
29977 if (TREE_CODE (init) != ADDR_EXPR)
29978 return;
29979 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29980 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29981 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29982 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29983 && TREE_OPERAND (init, 0) != decl))
29984 {
29985 dw_die_ref ref;
29986 dw_loc_descr_ref l;
29987
29988 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29989 {
29990 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29991 if (!rtl)
29992 return;
29993 decl = SYMBOL_REF_DECL (rtl);
29994 }
29995 else
29996 decl = TREE_OPERAND (init, 0);
29997 ref = lookup_decl_die (decl);
29998 if (ref == NULL
29999 || (!get_AT (ref, DW_AT_location)
30000 && !get_AT (ref, DW_AT_const_value)))
30001 return;
30002 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30003 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30004 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30005 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30006 add_AT_loc (die, DW_AT_location, l);
30007 }
30008 }
30009
30010 /* Return NULL if L is a valid DWARF expression, or the first op that is
30011 not a valid DWARF expression. */
30012
30013 static dw_loc_descr_ref
30014 non_dwarf_expression (dw_loc_descr_ref l)
30015 {
30016 while (l)
30017 {
30018 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30019 return l;
30020 switch (l->dw_loc_opc)
30021 {
30022 case DW_OP_regx:
30023 case DW_OP_implicit_value:
30024 case DW_OP_stack_value:
30025 case DW_OP_implicit_pointer:
30026 case DW_OP_GNU_implicit_pointer:
30027 case DW_OP_GNU_parameter_ref:
30028 case DW_OP_piece:
30029 case DW_OP_bit_piece:
30030 return l;
30031 default:
30032 break;
30033 }
30034 l = l->dw_loc_next;
30035 }
30036 return NULL;
30037 }
30038
30039 /* Return an adjusted copy of EXPR:
30040 If it is an empty DWARF expression, return it.
30041 If it is a valid non-empty DWARF expression,
30042 return a copy of EXPR with DW_OP_deref appended to it.
30043 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
30044 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30045 If it is a DWARF expression followed by DW_OP_stack_value, return a
30046 copy of the DWARF expression without anything appended.
30047 Otherwise, return NULL. */
30048
30049 static dw_loc_descr_ref
30050 copy_deref_exprloc (dw_loc_descr_ref expr)
30051 {
30052 dw_loc_descr_ref tail = NULL;
30053
30054 if (expr == NULL)
30055 return NULL;
30056
30057 dw_loc_descr_ref l = non_dwarf_expression (expr);
30058 if (l && l->dw_loc_next)
30059 return NULL;
30060
30061 if (l)
30062 {
30063 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30064 tail = new_loc_descr ((enum dwarf_location_atom)
30065 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30066 0, 0);
30067 else
30068 switch (l->dw_loc_opc)
30069 {
30070 case DW_OP_regx:
30071 tail = new_loc_descr (DW_OP_bregx,
30072 l->dw_loc_oprnd1.v.val_unsigned, 0);
30073 break;
30074 case DW_OP_stack_value:
30075 break;
30076 default:
30077 return NULL;
30078 }
30079 }
30080 else
30081 tail = new_loc_descr (DW_OP_deref, 0, 0);
30082
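  /* Copy the expression up to (but not including) the trailing
     non-expression op L, if any, and append TAIL: DW_OP_deref for a plain
     expression, DW_OP_breg* <0> for DW_OP_reg*, or nothing for
     DW_OP_stack_value.  */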
30083 dw_loc_descr_ref ret = NULL, *p = &ret;
30084 while (expr != l)
30085 {
30086 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30087 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30088 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30089 p = &(*p)->dw_loc_next;
30090 expr = expr->dw_loc_next;
30091 }
30092 *p = tail;
30093 return ret;
30094 }
30095
30096 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30097 reference to a variable or argument, adjust it if needed and return:
30098 -1 if the DW_AT_string_length attribute (and, if present, the
30099 DW_AT_{string_length_,}byte_size attribute) should be removed,
30100 0 if the attribute should be kept, perhaps with minor modifications (no need to rescan),
30101 1 if the attribute has been successfully adjusted. */
30102
30103 static int
30104 optimize_string_length (dw_attr_node *a)
30105 {
30106 dw_loc_descr_ref l = AT_loc (a), lv;
30107 dw_die_ref die;
30108 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30109 {
30110 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30111 die = lookup_decl_die (decl);
30112 if (die)
30113 {
30114 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30115 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30116 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30117 }
30118 else
30119 return -1;
30120 }
30121 else
30122 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30123
30124 /* DWARF5 allows reference class, so we can then reference the DIE.
30125 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30126 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30127 {
30128 a->dw_attr_val.val_class = dw_val_class_die_ref;
30129 a->dw_attr_val.val_entry = NULL;
30130 a->dw_attr_val.v.val_die_ref.die = die;
30131 a->dw_attr_val.v.val_die_ref.external = 0;
30132 return 0;
30133 }
30134
30135 dw_attr_node *av = get_AT (die, DW_AT_location);
30136 dw_loc_list_ref d;
30137 bool non_dwarf_expr = false;
30138
30139 if (av == NULL)
30140 return dwarf_strict ? -1 : 0;
30141 switch (AT_class (av))
30142 {
30143 case dw_val_class_loc_list:
30144 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30145 if (d->expr && non_dwarf_expression (d->expr))
30146 non_dwarf_expr = true;
30147 break;
30148 case dw_val_class_view_list:
30149 gcc_unreachable ();
30150 case dw_val_class_loc:
30151 lv = AT_loc (av);
30152 if (lv == NULL)
30153 return dwarf_strict ? -1 : 0;
30154 if (non_dwarf_expression (lv))
30155 non_dwarf_expr = true;
30156 break;
30157 default:
30158 return dwarf_strict ? -1 : 0;
30159 }
30160
30161 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30162 into DW_OP_call4 or DW_OP_GNU_variable_value into
30163 DW_OP_call4 DW_OP_deref, do so. */
30164 if (!non_dwarf_expr
30165 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30166 {
30167 l->dw_loc_opc = DW_OP_call4;
30168 if (l->dw_loc_next)
30169 l->dw_loc_next = NULL;
30170 else
30171 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30172 return 0;
30173 }
30174
30175 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30176 copy over the DW_AT_location attribute from die to a. */
30177 if (l->dw_loc_next != NULL)
30178 {
30179 a->dw_attr_val = av->dw_attr_val;
30180 return 1;
30181 }
30182
30183 dw_loc_list_ref list, *p;
30184 switch (AT_class (av))
30185 {
30186 case dw_val_class_loc_list:
30187 p = &list;
30188 list = NULL;
30189 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30190 {
30191 lv = copy_deref_exprloc (d->expr);
30192 if (lv)
30193 {
30194 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30195 p = &(*p)->dw_loc_next;
30196 }
30197 else if (!dwarf_strict && d->expr)
30198 return 0;
30199 }
30200 if (list == NULL)
30201 return dwarf_strict ? -1 : 0;
30202 a->dw_attr_val.val_class = dw_val_class_loc_list;
30203 gen_llsym (list);
30204 *AT_loc_list_ptr (a) = list;
30205 return 1;
30206 case dw_val_class_loc:
30207 lv = copy_deref_exprloc (AT_loc (av));
30208 if (lv == NULL)
30209 return dwarf_strict ? -1 : 0;
30210 a->dw_attr_val.v.val_loc = lv;
30211 return 1;
30212 default:
30213 gcc_unreachable ();
30214 }
30215 }
30216
30217 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30218 an address in the .rodata section if the string literal is emitted there,
30219 or, if it isn't found in .rodata, remove the containing location list
30220 or replace DW_AT_const_value with DW_AT_location and an empty location
30221 expression. Similarly for SYMBOL_REFs, keep only those that refer
30222 to something that has been emitted in the current CU. */
30223
30224 static void
30225 resolve_addr (dw_die_ref die)
30226 {
30227 dw_die_ref c;
30228 dw_attr_node *a;
30229 dw_loc_list_ref *curr, *start, loc;
30230 unsigned ix;
30231 bool remove_AT_byte_size = false;
30232
30233 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30234 switch (AT_class (a))
30235 {
30236 case dw_val_class_loc_list:
30237 start = curr = AT_loc_list_ptr (a);
30238 loc = *curr;
30239 gcc_assert (loc);
30240 /* The same list can be referenced more than once. See if we have
30241 already recorded the result from a previous pass. */
30242 if (loc->replaced)
30243 *curr = loc->dw_loc_next;
30244 else if (!loc->resolved_addr)
30245 {
30246 /* As things stand, we do not expect or allow one die to
30247 reference a suffix of another die's location list chain.
30248 References must be identical or completely separate.
30249 There is therefore no need to cache the result of this
30250 pass on any list other than the first; doing so
30251 would lead to unnecessary writes. */
30252 while (*curr)
30253 {
30254 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30255 if (!resolve_addr_in_expr (a, (*curr)->expr))
30256 {
30257 dw_loc_list_ref next = (*curr)->dw_loc_next;
30258 dw_loc_descr_ref l = (*curr)->expr;
30259
30260 if (next && (*curr)->ll_symbol)
30261 {
30262 gcc_assert (!next->ll_symbol);
30263 next->ll_symbol = (*curr)->ll_symbol;
30264 next->vl_symbol = (*curr)->vl_symbol;
30265 }
30266 if (dwarf_split_debug_info)
30267 remove_loc_list_addr_table_entries (l);
30268 *curr = next;
30269 }
30270 else
30271 {
30272 mark_base_types ((*curr)->expr);
30273 curr = &(*curr)->dw_loc_next;
30274 }
30275 }
30276 if (loc == *start)
30277 loc->resolved_addr = 1;
30278 else
30279 {
30280 loc->replaced = 1;
30281 loc->dw_loc_next = *start;
30282 }
30283 }
30284 if (!*start)
30285 {
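	    /* remove_AT shifts the remaining attributes down, so step IX back
	       to avoid skipping the attribute that now occupies this slot.  */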
30286 remove_AT (die, a->dw_attr);
30287 ix--;
30288 }
30289 break;
30290 case dw_val_class_view_list:
30291 {
30292 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30293 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30294 dw_val_node *llnode
30295 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30296 /* If we no longer have a loclist, or it no longer needs
30297 views, drop this attribute. */
30298 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30299 {
30300 remove_AT (die, a->dw_attr);
30301 ix--;
30302 }
30303 break;
30304 }
30305 case dw_val_class_loc:
30306 {
30307 dw_loc_descr_ref l = AT_loc (a);
30308 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30309 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30310 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30311 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30312 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30313 with DW_FORM_ref referencing the same DIE as
30314 DW_OP_GNU_variable_value used to reference. */
30315 if (a->dw_attr == DW_AT_string_length
30316 && l
30317 && l->dw_loc_opc == DW_OP_GNU_variable_value
30318 && (l->dw_loc_next == NULL
30319 || (l->dw_loc_next->dw_loc_next == NULL
30320 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30321 {
30322 switch (optimize_string_length (a))
30323 {
30324 case -1:
30325 remove_AT (die, a->dw_attr);
30326 ix--;
30327 /* If we drop DW_AT_string_length, we also need to drop
30328 DW_AT_{string_length_,}byte_size. */
30329 remove_AT_byte_size = true;
30330 continue;
30331 default:
30332 break;
30333 case 1:
30334 /* Even if we keep the optimized DW_AT_string_length,
30335 it might have changed AT_class, so process it again. */
30336 ix--;
30337 continue;
30338 }
30339 }
30340 /* For -gdwarf-2 don't attempt to optimize
30341 DW_AT_data_member_location containing
30342 DW_OP_plus_uconst - older consumers might
30343 rely on it being that op instead of a more complex,
30344 but shorter, location description. */
30345 if ((dwarf_version > 2
30346 || a->dw_attr != DW_AT_data_member_location
30347 || l == NULL
30348 || l->dw_loc_opc != DW_OP_plus_uconst
30349 || l->dw_loc_next != NULL)
30350 && !resolve_addr_in_expr (a, l))
30351 {
30352 if (dwarf_split_debug_info)
30353 remove_loc_list_addr_table_entries (l);
30354 if (l != NULL
30355 && l->dw_loc_next == NULL
30356 && l->dw_loc_opc == DW_OP_addr
30357 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30358 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30359 && a->dw_attr == DW_AT_location)
30360 {
30361 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30362 remove_AT (die, a->dw_attr);
30363 ix--;
30364 optimize_location_into_implicit_ptr (die, decl);
30365 break;
30366 }
30367 if (a->dw_attr == DW_AT_string_length)
30368 /* If we drop DW_AT_string_length, we also need to drop
30369 DW_AT_{string_length_,}byte_size. */
30370 remove_AT_byte_size = true;
30371 remove_AT (die, a->dw_attr);
30372 ix--;
30373 }
30374 else
30375 mark_base_types (l);
30376 }
30377 break;
30378 case dw_val_class_addr:
30379 if (a->dw_attr == DW_AT_const_value
30380 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30381 {
30382 if (AT_index (a) != NOT_INDEXED)
30383 remove_addr_table_entry (a->dw_attr_val.val_entry);
30384 remove_AT (die, a->dw_attr);
30385 ix--;
30386 }
30387 if ((die->die_tag == DW_TAG_call_site
30388 && a->dw_attr == DW_AT_call_origin)
30389 || (die->die_tag == DW_TAG_GNU_call_site
30390 && a->dw_attr == DW_AT_abstract_origin))
30391 {
30392 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30393 dw_die_ref tdie = lookup_decl_die (tdecl);
30394 dw_die_ref cdie;
30395 if (tdie == NULL
30396 && DECL_EXTERNAL (tdecl)
30397 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30398 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30399 {
30400 dw_die_ref pdie = cdie;
30401 /* Make sure we don't add these DIEs into type units.
30402 We could emit skeleton DIEs for context (namespaces,
30403 outer structs/classes) and a skeleton DIE for the
30404 innermost context with DW_AT_signature pointing to the
30405 type unit. See PR78835. */
30406 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30407 pdie = pdie->die_parent;
30408 if (pdie == NULL)
30409 {
30410 /* Creating a full DIE for tdecl is overly expensive and
30411 at this point even wrong when in the LTO phase,
30412 as it can end up generating new type DIEs we didn't
30413 output, and thus optimize_external_refs will crash. */
30414 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30415 add_AT_flag (tdie, DW_AT_external, 1);
30416 add_AT_flag (tdie, DW_AT_declaration, 1);
30417 add_linkage_attr (tdie, tdecl);
30418 add_name_and_src_coords_attributes (tdie, tdecl, true);
30419 equate_decl_number_to_die (tdecl, tdie);
30420 }
30421 }
30422 if (tdie)
30423 {
30424 a->dw_attr_val.val_class = dw_val_class_die_ref;
30425 a->dw_attr_val.v.val_die_ref.die = tdie;
30426 a->dw_attr_val.v.val_die_ref.external = 0;
30427 }
30428 else
30429 {
30430 if (AT_index (a) != NOT_INDEXED)
30431 remove_addr_table_entry (a->dw_attr_val.val_entry);
30432 remove_AT (die, a->dw_attr);
30433 ix--;
30434 }
30435 }
30436 break;
30437 default:
30438 break;
30439 }
30440
30441 if (remove_AT_byte_size)
30442 remove_AT (die, dwarf_version >= 5
30443 ? DW_AT_string_length_byte_size
30444 : DW_AT_byte_size);
30445
30446 FOR_EACH_CHILD (die, c, resolve_addr (c));
30447 }
30448 \f
30449 /* Helper routines for optimize_location_lists.
30450 This pass tries to share identical location lists in the .debug_loc
30451 section. */
30452
30453 /* Iteratively hash operands of LOC opcode into HSTATE. */
30454
30455 static void
30456 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30457 {
30458 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30459 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30460
30461 switch (loc->dw_loc_opc)
30462 {
30463 case DW_OP_const4u:
30464 case DW_OP_const8u:
30465 if (loc->dtprel)
30466 goto hash_addr;
30467 /* FALLTHRU */
30468 case DW_OP_const1u:
30469 case DW_OP_const1s:
30470 case DW_OP_const2u:
30471 case DW_OP_const2s:
30472 case DW_OP_const4s:
30473 case DW_OP_const8s:
30474 case DW_OP_constu:
30475 case DW_OP_consts:
30476 case DW_OP_pick:
30477 case DW_OP_plus_uconst:
30478 case DW_OP_breg0:
30479 case DW_OP_breg1:
30480 case DW_OP_breg2:
30481 case DW_OP_breg3:
30482 case DW_OP_breg4:
30483 case DW_OP_breg5:
30484 case DW_OP_breg6:
30485 case DW_OP_breg7:
30486 case DW_OP_breg8:
30487 case DW_OP_breg9:
30488 case DW_OP_breg10:
30489 case DW_OP_breg11:
30490 case DW_OP_breg12:
30491 case DW_OP_breg13:
30492 case DW_OP_breg14:
30493 case DW_OP_breg15:
30494 case DW_OP_breg16:
30495 case DW_OP_breg17:
30496 case DW_OP_breg18:
30497 case DW_OP_breg19:
30498 case DW_OP_breg20:
30499 case DW_OP_breg21:
30500 case DW_OP_breg22:
30501 case DW_OP_breg23:
30502 case DW_OP_breg24:
30503 case DW_OP_breg25:
30504 case DW_OP_breg26:
30505 case DW_OP_breg27:
30506 case DW_OP_breg28:
30507 case DW_OP_breg29:
30508 case DW_OP_breg30:
30509 case DW_OP_breg31:
30510 case DW_OP_regx:
30511 case DW_OP_fbreg:
30512 case DW_OP_piece:
30513 case DW_OP_deref_size:
30514 case DW_OP_xderef_size:
30515 hstate.add_object (val1->v.val_int);
30516 break;
30517 case DW_OP_skip:
30518 case DW_OP_bra:
30519 {
30520 int offset;
30521
30522 gcc_assert (val1->val_class == dw_val_class_loc);
30523 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30524 hstate.add_object (offset);
30525 }
30526 break;
30527 case DW_OP_implicit_value:
30528 hstate.add_object (val1->v.val_unsigned);
30529 switch (val2->val_class)
30530 {
30531 case dw_val_class_const:
30532 hstate.add_object (val2->v.val_int);
30533 break;
30534 case dw_val_class_vec:
30535 {
30536 unsigned int elt_size = val2->v.val_vec.elt_size;
30537 unsigned int len = val2->v.val_vec.length;
30538
30539 hstate.add_int (elt_size);
30540 hstate.add_int (len);
30541 hstate.add (val2->v.val_vec.array, len * elt_size);
30542 }
30543 break;
30544 case dw_val_class_const_double:
30545 hstate.add_object (val2->v.val_double.low);
30546 hstate.add_object (val2->v.val_double.high);
30547 break;
30548 case dw_val_class_wide_int:
30549 hstate.add (val2->v.val_wide->get_val (),
30550 get_full_len (*val2->v.val_wide)
30551 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30552 break;
30553 case dw_val_class_addr:
30554 inchash::add_rtx (val2->v.val_addr, hstate);
30555 break;
30556 default:
30557 gcc_unreachable ();
30558 }
30559 break;
30560 case DW_OP_bregx:
30561 case DW_OP_bit_piece:
30562 hstate.add_object (val1->v.val_int);
30563 hstate.add_object (val2->v.val_int);
30564 break;
30565 case DW_OP_addr:
30566 hash_addr:
30567 if (loc->dtprel)
30568 {
30569 unsigned char dtprel = 0xd1;
30570 hstate.add_object (dtprel);
30571 }
30572 inchash::add_rtx (val1->v.val_addr, hstate);
30573 break;
30574 case DW_OP_GNU_addr_index:
30575 case DW_OP_addrx:
30576 case DW_OP_GNU_const_index:
30577 case DW_OP_constx:
30578 {
30579 if (loc->dtprel)
30580 {
30581 unsigned char dtprel = 0xd1;
30582 hstate.add_object (dtprel);
30583 }
30584 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30585 }
30586 break;
30587 case DW_OP_implicit_pointer:
30588 case DW_OP_GNU_implicit_pointer:
30589 hstate.add_int (val2->v.val_int);
30590 break;
30591 case DW_OP_entry_value:
30592 case DW_OP_GNU_entry_value:
30593 hstate.add_object (val1->v.val_loc);
30594 break;
30595 case DW_OP_regval_type:
30596 case DW_OP_deref_type:
30597 case DW_OP_GNU_regval_type:
30598 case DW_OP_GNU_deref_type:
30599 {
30600 unsigned int byte_size
30601 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30602 unsigned int encoding
30603 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30604 hstate.add_object (val1->v.val_int);
30605 hstate.add_object (byte_size);
30606 hstate.add_object (encoding);
30607 }
30608 break;
30609 case DW_OP_convert:
30610 case DW_OP_reinterpret:
30611 case DW_OP_GNU_convert:
30612 case DW_OP_GNU_reinterpret:
30613 if (val1->val_class == dw_val_class_unsigned_const)
30614 {
30615 hstate.add_object (val1->v.val_unsigned);
30616 break;
30617 }
30618 /* FALLTHRU */
30619 case DW_OP_const_type:
30620 case DW_OP_GNU_const_type:
30621 {
30622 unsigned int byte_size
30623 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30624 unsigned int encoding
30625 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30626 hstate.add_object (byte_size);
30627 hstate.add_object (encoding);
30628 if (loc->dw_loc_opc != DW_OP_const_type
30629 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30630 break;
30631 hstate.add_object (val2->val_class);
30632 switch (val2->val_class)
30633 {
30634 case dw_val_class_const:
30635 hstate.add_object (val2->v.val_int);
30636 break;
30637 case dw_val_class_vec:
30638 {
30639 unsigned int elt_size = val2->v.val_vec.elt_size;
30640 unsigned int len = val2->v.val_vec.length;
30641
30642 hstate.add_object (elt_size);
30643 hstate.add_object (len);
30644 hstate.add (val2->v.val_vec.array, len * elt_size);
30645 }
30646 break;
30647 case dw_val_class_const_double:
30648 hstate.add_object (val2->v.val_double.low);
30649 hstate.add_object (val2->v.val_double.high);
30650 break;
30651 case dw_val_class_wide_int:
30652 hstate.add (val2->v.val_wide->get_val (),
30653 get_full_len (*val2->v.val_wide)
30654 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30655 break;
30656 default:
30657 gcc_unreachable ();
30658 }
30659 }
30660 break;
30661
30662 default:
30663 /* Other codes have no operands. */
30664 break;
30665 }
30666 }
30667
30668 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30669
30670 static inline void
30671 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30672 {
30673 dw_loc_descr_ref l;
30674 bool sizes_computed = false;
30675 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30676 size_of_locs (loc);
30677
30678 for (l = loc; l != NULL; l = l->dw_loc_next)
30679 {
30680 enum dwarf_location_atom opc = l->dw_loc_opc;
30681 hstate.add_object (opc);
30682 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30683 {
30684 size_of_locs (loc);
30685 sizes_computed = true;
30686 }
30687 hash_loc_operands (l, hstate);
30688 }
30689 }
30690
30691 /* Compute hash of the whole location list LIST_HEAD. */
30692
30693 static inline void
30694 hash_loc_list (dw_loc_list_ref list_head)
30695 {
30696 dw_loc_list_ref curr = list_head;
30697 inchash::hash hstate;
30698
30699 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30700 {
30701 hstate.add (curr->begin, strlen (curr->begin) + 1);
30702 hstate.add (curr->end, strlen (curr->end) + 1);
30703 hstate.add_object (curr->vbegin);
30704 hstate.add_object (curr->vend);
30705 if (curr->section)
30706 hstate.add (curr->section, strlen (curr->section) + 1);
30707 hash_locs (curr->expr, hstate);
30708 }
30709 list_head->hash = hstate.end ();
30710 }
30711
30712 /* Return true if X and Y opcodes have the same operands. */
30713
30714 static inline bool
30715 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30716 {
30717 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30718 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30719 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30720 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30721
30722 switch (x->dw_loc_opc)
30723 {
30724 case DW_OP_const4u:
30725 case DW_OP_const8u:
30726 if (x->dtprel)
30727 goto hash_addr;
30728 /* FALLTHRU */
30729 case DW_OP_const1u:
30730 case DW_OP_const1s:
30731 case DW_OP_const2u:
30732 case DW_OP_const2s:
30733 case DW_OP_const4s:
30734 case DW_OP_const8s:
30735 case DW_OP_constu:
30736 case DW_OP_consts:
30737 case DW_OP_pick:
30738 case DW_OP_plus_uconst:
30739 case DW_OP_breg0:
30740 case DW_OP_breg1:
30741 case DW_OP_breg2:
30742 case DW_OP_breg3:
30743 case DW_OP_breg4:
30744 case DW_OP_breg5:
30745 case DW_OP_breg6:
30746 case DW_OP_breg7:
30747 case DW_OP_breg8:
30748 case DW_OP_breg9:
30749 case DW_OP_breg10:
30750 case DW_OP_breg11:
30751 case DW_OP_breg12:
30752 case DW_OP_breg13:
30753 case DW_OP_breg14:
30754 case DW_OP_breg15:
30755 case DW_OP_breg16:
30756 case DW_OP_breg17:
30757 case DW_OP_breg18:
30758 case DW_OP_breg19:
30759 case DW_OP_breg20:
30760 case DW_OP_breg21:
30761 case DW_OP_breg22:
30762 case DW_OP_breg23:
30763 case DW_OP_breg24:
30764 case DW_OP_breg25:
30765 case DW_OP_breg26:
30766 case DW_OP_breg27:
30767 case DW_OP_breg28:
30768 case DW_OP_breg29:
30769 case DW_OP_breg30:
30770 case DW_OP_breg31:
30771 case DW_OP_regx:
30772 case DW_OP_fbreg:
30773 case DW_OP_piece:
30774 case DW_OP_deref_size:
30775 case DW_OP_xderef_size:
30776 return valx1->v.val_int == valy1->v.val_int;
30777 case DW_OP_skip:
30778 case DW_OP_bra:
30779 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30780 can cause irrelevant differences in dw_loc_addr. */
30781 gcc_assert (valx1->val_class == dw_val_class_loc
30782 && valy1->val_class == dw_val_class_loc
30783 && (dwarf_split_debug_info
30784 || x->dw_loc_addr == y->dw_loc_addr));
30785 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30786 case DW_OP_implicit_value:
30787 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30788 || valx2->val_class != valy2->val_class)
30789 return false;
30790 switch (valx2->val_class)
30791 {
30792 case dw_val_class_const:
30793 return valx2->v.val_int == valy2->v.val_int;
30794 case dw_val_class_vec:
30795 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30796 && valx2->v.val_vec.length == valy2->v.val_vec.length
30797 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30798 valx2->v.val_vec.elt_size
30799 * valx2->v.val_vec.length) == 0;
30800 case dw_val_class_const_double:
30801 return valx2->v.val_double.low == valy2->v.val_double.low
30802 && valx2->v.val_double.high == valy2->v.val_double.high;
30803 case dw_val_class_wide_int:
30804 return *valx2->v.val_wide == *valy2->v.val_wide;
30805 case dw_val_class_addr:
30806 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30807 default:
30808 gcc_unreachable ();
30809 }
30810 case DW_OP_bregx:
30811 case DW_OP_bit_piece:
30812 return valx1->v.val_int == valy1->v.val_int
30813 && valx2->v.val_int == valy2->v.val_int;
30814 case DW_OP_addr:
30815 hash_addr:
30816 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30817 case DW_OP_GNU_addr_index:
30818 case DW_OP_addrx:
30819 case DW_OP_GNU_const_index:
30820 case DW_OP_constx:
30821 {
30822 rtx ax1 = valx1->val_entry->addr.rtl;
30823 rtx ay1 = valy1->val_entry->addr.rtl;
30824 return rtx_equal_p (ax1, ay1);
30825 }
30826 case DW_OP_implicit_pointer:
30827 case DW_OP_GNU_implicit_pointer:
30828 return valx1->val_class == dw_val_class_die_ref
30829 && valx1->val_class == valy1->val_class
30830 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30831 && valx2->v.val_int == valy2->v.val_int;
30832 case DW_OP_entry_value:
30833 case DW_OP_GNU_entry_value:
30834 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30835 case DW_OP_const_type:
30836 case DW_OP_GNU_const_type:
30837 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30838 || valx2->val_class != valy2->val_class)
30839 return false;
30840 switch (valx2->val_class)
30841 {
30842 case dw_val_class_const:
30843 return valx2->v.val_int == valy2->v.val_int;
30844 case dw_val_class_vec:
30845 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30846 && valx2->v.val_vec.length == valy2->v.val_vec.length
30847 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30848 valx2->v.val_vec.elt_size
30849 * valx2->v.val_vec.length) == 0;
30850 case dw_val_class_const_double:
30851 return valx2->v.val_double.low == valy2->v.val_double.low
30852 && valx2->v.val_double.high == valy2->v.val_double.high;
30853 case dw_val_class_wide_int:
30854 return *valx2->v.val_wide == *valy2->v.val_wide;
30855 default:
30856 gcc_unreachable ();
30857 }
30858 case DW_OP_regval_type:
30859 case DW_OP_deref_type:
30860 case DW_OP_GNU_regval_type:
30861 case DW_OP_GNU_deref_type:
30862 return valx1->v.val_int == valy1->v.val_int
30863 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30864 case DW_OP_convert:
30865 case DW_OP_reinterpret:
30866 case DW_OP_GNU_convert:
30867 case DW_OP_GNU_reinterpret:
30868 if (valx1->val_class != valy1->val_class)
30869 return false;
30870 if (valx1->val_class == dw_val_class_unsigned_const)
30871 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30872 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30873 case DW_OP_GNU_parameter_ref:
30874 return valx1->val_class == dw_val_class_die_ref
30875 && valx1->val_class == valy1->val_class
30876 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30877 default:
30878 /* Other codes have no operands. */
30879 return true;
30880 }
30881 }
30882
30883 /* Return true if DWARF location expressions X and Y are the same. */
30884
30885 static inline bool
30886 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30887 {
30888 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30889 if (x->dw_loc_opc != y->dw_loc_opc
30890 || x->dtprel != y->dtprel
30891 || !compare_loc_operands (x, y))
30892 break;
30893 return x == NULL && y == NULL;
30894 }
30895
30896 /* Hashtable helpers. */
30897
30898 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30899 {
30900 static inline hashval_t hash (const dw_loc_list_struct *);
30901 static inline bool equal (const dw_loc_list_struct *,
30902 const dw_loc_list_struct *);
30903 };
30904
30905 /* Return precomputed hash of location list X. */
30906
30907 inline hashval_t
30908 loc_list_hasher::hash (const dw_loc_list_struct *x)
30909 {
30910 return x->hash;
30911 }
30912
30913 /* Return true if location lists A and B are the same. */
30914
30915 inline bool
30916 loc_list_hasher::equal (const dw_loc_list_struct *a,
30917 const dw_loc_list_struct *b)
30918 {
30919 if (a == b)
30920 return true;
30921 if (a->hash != b->hash)
30922 return false;
30923 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30924 if (strcmp (a->begin, b->begin) != 0
30925 || strcmp (a->end, b->end) != 0
30926 || (a->section == NULL) != (b->section == NULL)
30927 || (a->section && strcmp (a->section, b->section) != 0)
30928 || a->vbegin != b->vbegin || a->vend != b->vend
30929 || !compare_locs (a->expr, b->expr))
30930 break;
30931 return a == NULL && b == NULL;
30932 }
30933
30934 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30935
30936
30937 /* Recursively optimize location lists referenced from DIE
30938 children and share them whenever possible. */
30939
30940 static void
30941 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30942 {
30943 dw_die_ref c;
30944 dw_attr_node *a;
30945 unsigned ix;
30946 dw_loc_list_struct **slot;
30947 bool drop_locviews = false;
30948 bool has_locviews = false;
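/* If any location list ends up without a view list symbol, either
because it has no views or because it is replaced by a shared copy
that lacks one, the DIE's DW_AT_GNU_locviews attribute is dropped
below. */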
30949
30950 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30951 if (AT_class (a) == dw_val_class_loc_list)
30952 {
30953 dw_loc_list_ref list = AT_loc_list (a);
30954 /* TODO: perform some optimizations here, before hashing
30955 it and storing into the hash table. */
30956 hash_loc_list (list);
30957 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30958 if (*slot == NULL)
30959 {
30960 *slot = list;
30961 if (loc_list_has_views (list))
30962 gcc_assert (list->vl_symbol);
30963 else if (list->vl_symbol)
30964 {
30965 drop_locviews = true;
30966 list->vl_symbol = NULL;
30967 }
30968 }
30969 else
30970 {
30971 if (list->vl_symbol && !(*slot)->vl_symbol)
30972 drop_locviews = true;
30973 a->dw_attr_val.v.val_loc_list = *slot;
30974 }
30975 }
30976 else if (AT_class (a) == dw_val_class_view_list)
30977 {
30978 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30979 has_locviews = true;
30980 }
30981
30982
30983 if (drop_locviews && has_locviews)
30984 remove_AT (die, DW_AT_GNU_locviews);
30985
30986 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30987 }
30988
30989
30990 /* Recursively assign each location list a unique index into the debug_addr
30991 section. */
30992
30993 static void
30994 index_location_lists (dw_die_ref die)
30995 {
30996 dw_die_ref c;
30997 dw_attr_node *a;
30998 unsigned ix;
30999
31000 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31001 if (AT_class (a) == dw_val_class_loc_list)
31002 {
31003 dw_loc_list_ref list = AT_loc_list (a);
31004 dw_loc_list_ref curr;
31005 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31006 {
31007 /* Don't index an entry that has already been indexed
31008 or won't be output. Make sure skip_loc_list_entry doesn't
31009 call size_of_locs, because that could create a circular
31010 dependency: index_location_lists requires the address table
31011 indexes to be computed, but new indexes are added through
31012 add_addr_table_entry, and computing the address table indexes
31013 requires that no new entries be added to the hash table. In the
31014 rare case of a DWARF[234] location expression of 64KB or more,
31015 we'll just waste an unused address table entry for it. */
31016 if (curr->begin_entry != NULL
31017 || skip_loc_list_entry (curr))
31018 continue;
31019
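/* Remember the address table entry for the start label; in split
DWARF the location list entry refers to its start address through
the index later assigned to this entry. */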
31020 curr->begin_entry
31021 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31022 }
31023 }
31024
31025 FOR_EACH_CHILD (die, c, index_location_lists (c));
31026 }
31027
31028 /* Optimize location lists referenced from DIE
31029 children and share them whenever possible. */
31030
31031 static void
31032 optimize_location_lists (dw_die_ref die)
31033 {
31034 loc_list_hash_type htab (500);
31035 optimize_location_lists_1 (die, &htab);
31036 }
31037 \f
31038 /* Traverse the limbo die list, and add parent/child links. The only
31039 dies without parents that should be here are concrete instances of
31040 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31041 For concrete instances, we can get the parent die from the abstract
31042 instance. */
31043
31044 static void
31045 flush_limbo_die_list (void)
31046 {
31047 limbo_die_node *node;
31048
31049 /* get_context_die calls force_decl_die, which can put new DIEs on the
31050 limbo list in LTO mode when nested functions are put in a different
31051 partition than that of their parent function. */
31052 while ((node = limbo_die_list))
31053 {
31054 dw_die_ref die = node->die;
31055 limbo_die_list = node->next;
31056
31057 if (die->die_parent == NULL)
31058 {
31059 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31060
31061 if (origin && origin->die_parent)
31062 add_child_die (origin->die_parent, die);
31063 else if (is_cu_die (die))
31064 ;
31065 else if (seen_error ())
31066 /* It's OK to be confused by errors in the input. */
31067 add_child_die (comp_unit_die (), die);
31068 else
31069 {
31070 /* In certain situations, the lexical block containing a
31071 nested function can be optimized away, which results
31072 in the nested function die being orphaned. Likewise
31073 with the return type of that nested function. Force
31074 this to be a child of the containing function.
31075
31076 It may happen that even the containing function got fully
31077 inlined and optimized out. In that case we are lost and
31078 assign the empty child. This should not be a big issue as
31079 the function is likely unreachable too. */
31080 gcc_assert (node->created_for);
31081
31082 if (DECL_P (node->created_for))
31083 origin = get_context_die (DECL_CONTEXT (node->created_for));
31084 else if (TYPE_P (node->created_for))
31085 origin = scope_die_for (node->created_for, comp_unit_die ());
31086 else
31087 origin = comp_unit_die ();
31088
31089 add_child_die (origin, die);
31090 }
31091 }
31092 }
31093 }
31094
31095 /* Reset DIEs so we can output them again. */
31096
31097 static void
31098 reset_dies (dw_die_ref die)
31099 {
31100 dw_die_ref c;
31101
31102 /* Remove stuff we re-generate. */
31103 die->die_mark = 0;
31104 die->die_offset = 0;
31105 die->die_abbrev = 0;
31106 remove_AT (die, DW_AT_sibling);
31107
31108 FOR_EACH_CHILD (die, c, reset_dies (c));
31109 }
31110
31111 /* Output stuff that dwarf requires at the end of every file,
31112 and generate the DWARF-2 debugging info. */
31113
31114 static void
31115 dwarf2out_finish (const char *filename)
31116 {
31117 comdat_type_node *ctnode;
31118 dw_die_ref main_comp_unit_die;
31119 unsigned char checksum[16];
31120 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31121
31122 /* Flush out any latecomers to the limbo party. */
31123 flush_limbo_die_list ();
31124
31125 if (inline_entry_data_table)
31126 gcc_assert (inline_entry_data_table->elements () == 0);
31127
31128 if (flag_checking)
31129 {
31130 verify_die (comp_unit_die ());
31131 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31132 verify_die (node->die);
31133 }
31134
31135 /* We shouldn't have any symbols with delayed asm names for
31136 DIEs generated after early finish. */
31137 gcc_assert (deferred_asm_name == NULL);
31138
31139 gen_remaining_tmpl_value_param_die_attribute ();
31140
31141 if (flag_generate_lto || flag_generate_offload)
31142 {
31143 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31144
31145 /* Prune stuff so that dwarf2out_finish runs successfully
31146 for the fat part of the object. */
31147 reset_dies (comp_unit_die ());
31148 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31149 reset_dies (node->die);
31150
31151 hash_table<comdat_type_hasher> comdat_type_table (100);
31152 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31153 {
31154 comdat_type_node **slot
31155 = comdat_type_table.find_slot (ctnode, INSERT);
31156
31157 /* Don't reset types twice. */
31158 if (*slot != HTAB_EMPTY_ENTRY)
31159 continue;
31160
31161 /* Remove the pointer to the line table. */
31162 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31163
31164 if (debug_info_level >= DINFO_LEVEL_TERSE)
31165 reset_dies (ctnode->root_die);
31166
31167 *slot = ctnode;
31168 }
31169
31170 /* Reset die CU symbol so we don't output it twice. */
31171 comp_unit_die ()->die_id.die_symbol = NULL;
31172
31173 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31174 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31175 if (have_macinfo)
31176 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31177
31178 /* Remove indirect string decisions. */
31179 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31180 if (debug_line_str_hash)
31181 {
31182 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31183 debug_line_str_hash = NULL;
31184 }
31185 }
31186
31187 #if ENABLE_ASSERT_CHECKING
31188 {
31189 dw_die_ref die = comp_unit_die (), c;
31190 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31191 }
31192 #endif
31193 resolve_addr (comp_unit_die ());
31194 move_marked_base_types ();
31195
31196 if (dump_file)
31197 {
31198 fprintf (dump_file, "DWARF for %s\n", filename);
31199 print_die (comp_unit_die (), dump_file);
31200 }
31201
31202 /* Initialize sections and labels used for actual assembler output. */
31203 unsigned generation = init_sections_and_labels (false);
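/* init_sections_and_labels may already have run once for the early
LTO debug output; the generation number it returns distinguishes
labels between the runs and is passed to output_rnglists below. */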
31204
31205 /* Traverse the DIE's and add sibling attributes to those DIE's that
31206 have children. */
31207 add_sibling_attributes (comp_unit_die ());
31208 limbo_die_node *node;
31209 for (node = cu_die_list; node; node = node->next)
31210 add_sibling_attributes (node->die);
31211 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31212 add_sibling_attributes (ctnode->root_die);
31213
31214 /* When splitting DWARF info, we put some attributes in the
31215 skeleton compile_unit DIE that remains in the .o, while
31216 most attributes go in the DWO compile_unit_die. */
31217 if (dwarf_split_debug_info)
31218 {
31219 limbo_die_node *cu;
31220 main_comp_unit_die = gen_compile_unit_die (NULL);
31221 if (dwarf_version >= 5)
31222 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
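/* gen_compile_unit_die placed the new skeleton DIE on the limbo
list; move it onto cu_die_list so it is output together with the
other compilation units. */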
31223 cu = limbo_die_list;
31224 gcc_assert (cu->die == main_comp_unit_die);
31225 limbo_die_list = limbo_die_list->next;
31226 cu->next = cu_die_list;
31227 cu_die_list = cu;
31228 }
31229 else
31230 main_comp_unit_die = comp_unit_die ();
31231
31232 /* Output a terminator label for the .text section. */
31233 switch_to_section (text_section);
31234 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31235 if (cold_text_section)
31236 {
31237 switch_to_section (cold_text_section);
31238 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31239 }
31240
31241 /* We can only use the low/high_pc attributes if all of the code was
31242 in .text. */
31243 if (!have_multiple_function_sections
31244 || (dwarf_version < 3 && dwarf_strict))
31245 {
31246 /* Don't add if the CU has no associated code. */
31247 if (text_section_used)
31248 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31249 text_end_label, true);
31250 }
31251 else
31252 {
31253 unsigned fde_idx;
31254 dw_fde_ref fde;
31255 bool range_list_added = false;
31256
31257 if (text_section_used)
31258 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31259 text_end_label, &range_list_added, true);
31260 if (cold_text_section_used)
31261 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31262 cold_end_label, &range_list_added, true);
31263
31264 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31265 {
31266 if (DECL_IGNORED_P (fde->decl))
31267 continue;
31268 if (!fde->in_std_section)
31269 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31270 fde->dw_fde_end, &range_list_added,
31271 true);
31272 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31273 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31274 fde->dw_fde_second_end, &range_list_added,
31275 true);
31276 }
31277
31278 if (range_list_added)
31279 {
31280 /* We need to give .debug_loc and .debug_ranges an appropriate
31281 "base address". Use zero so that these addresses become
31282 absolute. Historically, we've emitted the unexpected
31283 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31284 Emit both to give time for other tools to adapt. */
31285 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31286 if (! dwarf_strict && dwarf_version < 4)
31287 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31288
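/* Terminate the range list built by add_ranges_by_labels above. */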
31289 add_ranges (NULL);
31290 }
31291 }
31292
31293 /* AIX Assembler inserts the length, so adjust the reference to match the
31294 offset expected by debuggers. */
31295 strcpy (dl_section_ref, debug_line_section_label);
31296 if (XCOFF_DEBUGGING_INFO)
31297 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31298
31299 if (debug_info_level >= DINFO_LEVEL_TERSE)
31300 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31301 dl_section_ref);
31302
31303 if (have_macinfo)
31304 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31305 macinfo_section_label);
31306
31307 if (dwarf_split_debug_info)
31308 {
31309 if (have_location_lists)
31310 {
31311 /* Since we generate the loclists in the split DWARF .dwo
31312 file itself, we don't need to generate a loclists_base
31313 attribute for the split compile unit DIE. That attribute
31314 (and using relocatable sec_offset FORMs) isn't allowed
31315 for a split compile unit. Only if the .debug_loclists
31316 section was in the main file, would we need to generate a
31317 loclists_base attribute here (for the full or skeleton
31318 unit DIE). */
31319
31320 /* optimize_location_lists calculates the size of the lists,
31321 so index them first, and assign indices to the entries.
31322 Although optimize_location_lists will remove entries from
31323 the table, it only does so for duplicates, and therefore
31324 only reduces ref_counts to 1. */
31325 index_location_lists (comp_unit_die ());
31326 }
31327
31328 if (addr_index_table != NULL)
31329 {
31330 unsigned int index = 0;
31331 addr_index_table
31332 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31333 (&index);
31334 }
31335 }
31336
31337 loc_list_idx = 0;
31338 if (have_location_lists)
31339 {
31340 optimize_location_lists (comp_unit_die ());
31341 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31342 if (dwarf_version >= 5 && dwarf_split_debug_info)
31343 assign_location_list_indexes (comp_unit_die ());
31344 }
31345
31346 save_macinfo_strings ();
31347
31348 if (dwarf_split_debug_info)
31349 {
31350 unsigned int index = 0;
31351
31352 /* Add attributes common to skeleton compile_units and
31353 type_units. Because these attributes include strings, it
31354 must be done before freezing the string table. Top-level
31355 skeleton die attrs are added when the skeleton type unit is
31356 created, so ensure it is created by this point. */
31357 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31358 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31359 }
31360
31361 /* Output all of the compilation units. We put the main one last so that
31362 the offsets are available to output_pubnames. */
31363 for (node = cu_die_list; node; node = node->next)
31364 output_comp_unit (node->die, 0, NULL);
31365
31366 hash_table<comdat_type_hasher> comdat_type_table (100);
31367 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31368 {
31369 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31370
31371 /* Don't output duplicate types. */
31372 if (*slot != HTAB_EMPTY_ENTRY)
31373 continue;
31374
31375 /* Add a pointer to the line table for the main compilation unit
31376 so that the debugger can make sense of DW_AT_decl_file
31377 attributes. */
31378 if (debug_info_level >= DINFO_LEVEL_TERSE)
31379 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31380 (!dwarf_split_debug_info
31381 ? dl_section_ref
31382 : debug_skeleton_line_section_label));
31383
31384 output_comdat_type_unit (ctnode);
31385 *slot = ctnode;
31386 }
31387
31388 if (dwarf_split_debug_info)
31389 {
31390 int mark;
31391 struct md5_ctx ctx;
31392
31393 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31394 index_rnglists ();
31395
31396 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31397 md5_init_ctx (&ctx);
31398 mark = 0;
31399 die_checksum (comp_unit_die (), &ctx, &mark);
31400 unmark_all_dies (comp_unit_die ());
31401 md5_finish_ctx (&ctx, checksum);
31402
31403 if (dwarf_version < 5)
31404 {
31405 /* Use the first 8 bytes of the checksum as the dwo_id,
31406 and add it to both comp-unit DIEs. */
31407 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31408 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31409 }
31410
31411 /* Add the base offset of the ranges table to the skeleton
31412 comp-unit DIE. */
31413 if (!vec_safe_is_empty (ranges_table))
31414 {
31415 if (dwarf_version >= 5)
31416 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31417 ranges_base_label);
31418 else
31419 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31420 ranges_section_label);
31421 }
31422
31423 switch_to_section (debug_addr_section);
31424 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31425 which GCC uses to implement -gsplit-dwarf as a GNU extension
31426 before DWARF5, didn't have a header for .debug_addr units.
31427 DWARF5 specifies a small header when address tables are used. */
31428 if (dwarf_version >= 5)
31429 {
31430 unsigned int last_idx = 0;
31431 unsigned long addrs_length;
31432
31433 addr_index_table->traverse_noresize
31434 <unsigned int *, count_index_addrs> (&last_idx);
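/* The unit length field does not count itself; what follows it is a
2-byte version, a 1-byte address size and a 1-byte segment selector
size, then the address entries, hence the +4. */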
31435 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31436
31437 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31438 dw2_asm_output_data (4, 0xffffffff,
31439 "Escape value for 64-bit DWARF extension");
31440 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31441 "Length of Address Unit");
31442 dw2_asm_output_data (2, 5, "DWARF addr version");
31443 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31444 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31445 }
31446 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31447 output_addr_table ();
31448 }
31449
31450 /* Output the main compilation unit if non-empty or if .debug_macinfo
31451 or .debug_macro will be emitted. */
31452 output_comp_unit (comp_unit_die (), have_macinfo,
31453 dwarf_split_debug_info ? checksum : NULL);
31454
31455 if (dwarf_split_debug_info && info_section_emitted)
31456 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31457
31458 /* Output the abbreviation table. */
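/* Entry zero of the abbrev table is a dummy (abbrev codes start at 1),
so a length of one means no DIEs were emitted. */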
31459 if (vec_safe_length (abbrev_die_table) != 1)
31460 {
31461 switch_to_section (debug_abbrev_section);
31462 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31463 output_abbrev_section ();
31464 }
31465
31466 /* Output location list section if necessary. */
31467 if (have_location_lists)
31468 {
31469 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31470 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31471 /* Output the location lists info. */
31472 switch_to_section (debug_loc_section);
31473 if (dwarf_version >= 5)
31474 {
31475 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31476 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31477 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31478 dw2_asm_output_data (4, 0xffffffff,
31479 "Initial length escape value indicating "
31480 "64-bit DWARF extension");
31481 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31482 "Length of Location Lists");
31483 ASM_OUTPUT_LABEL (asm_out_file, l1);
31484 output_dwarf_version ();
31485 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31486 dw2_asm_output_data (1, 0, "Segment Size");
31487 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31488 "Offset Entry Count");
31489 }
31490 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31491 if (dwarf_version >= 5 && dwarf_split_debug_info)
31492 {
31493 unsigned int save_loc_list_idx = loc_list_idx;
31494 loc_list_idx = 0;
31495 output_loclists_offsets (comp_unit_die ());
31496 gcc_assert (save_loc_list_idx == loc_list_idx);
31497 }
31498 output_location_lists (comp_unit_die ());
31499 if (dwarf_version >= 5)
31500 ASM_OUTPUT_LABEL (asm_out_file, l2);
31501 }
31502
31503 output_pubtables ();
31504
31505 /* Output the address range information if a CU (.debug_info section)
31506 was emitted. We output an empty table even if we had no functions
31507 to put in it. This is because the consumer has no way to tell the
31508 difference between an empty table that we omitted and failure to
31509 generate a table that would have contained data. */
31510 if (info_section_emitted)
31511 {
31512 switch_to_section (debug_aranges_section);
31513 output_aranges ();
31514 }
31515
31516 /* Output ranges section if necessary. */
31517 if (!vec_safe_is_empty (ranges_table))
31518 {
31519 if (dwarf_version >= 5)
31520 output_rnglists (generation);
31521 else
31522 output_ranges ();
31523 }
31524
31525 /* Have to end the macro section. */
31526 if (have_macinfo)
31527 {
31528 switch_to_section (debug_macinfo_section);
31529 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31530 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31531 : debug_skeleton_line_section_label, false);
31532 dw2_asm_output_data (1, 0, "End compilation unit");
31533 }
31534
31535 /* Output the source line correspondence table. We must do this
31536 even if there is no line information. Otherwise, on an empty
31537 translation unit, we will generate a present, but empty,
31538 .debug_info section. IRIX 6.5 `nm' will then complain when
31539 examining the file. This is done late so that any filenames
31540 used by the debug_info section are marked as 'used'. */
31541 switch_to_section (debug_line_section);
31542 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
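/* If the assembler emits the line table itself (via .file/.loc
directives), there is nothing more to output here. */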
31543 if (! output_asm_line_debug_info ())
31544 output_line_info (false);
31545
31546 if (dwarf_split_debug_info && info_section_emitted)
31547 {
31548 switch_to_section (debug_skeleton_line_section);
31549 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31550 output_line_info (true);
31551 }
31552
31553 /* If we emitted any indirect strings, output the string table too. */
31554 if (debug_str_hash || skeleton_debug_str_hash)
31555 output_indirect_strings ();
31556 if (debug_line_str_hash)
31557 {
31558 switch_to_section (debug_line_str_section);
31559 const enum dwarf_form form = DW_FORM_line_strp;
31560 debug_line_str_hash->traverse<enum dwarf_form,
31561 output_indirect_string> (form);
31562 }
31563
31564 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31565 symview_upper_bound = 0;
31566 if (zero_view_p)
31567 bitmap_clear (zero_view_p);
31568 }
31569
31570 /* Returns a hash value for X (which really is a variable_value_struct). */
31571
31572 inline hashval_t
31573 variable_value_hasher::hash (variable_value_struct *x)
31574 {
31575 return (hashval_t) x->decl_id;
31576 }
31577
31578 /* Return true if the decl_id of variable_value_struct X is the same as
31579 the UID of decl Y. */
31580
31581 inline bool
31582 variable_value_hasher::equal (variable_value_struct *x, tree y)
31583 {
31584 return x->decl_id == DECL_UID (y);
31585 }
31586
31587 /* Helper function for resolve_variable_value, handle
31588 DW_OP_GNU_variable_value in one location expression.
31589 Return true if the exprloc has been changed into a loclist. */
31590
31591 static bool
31592 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31593 {
31594 dw_loc_descr_ref next;
31595 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31596 {
31597 next = loc->dw_loc_next;
31598 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31599 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31600 continue;
31601
31602 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31603 if (DECL_CONTEXT (decl) != current_function_decl)
31604 continue;
31605
31606 dw_die_ref ref = lookup_decl_die (decl);
31607 if (ref)
31608 {
31609 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31610 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31611 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31612 continue;
31613 }
31614 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31615 if (l == NULL)
31616 continue;
31617 if (l->dw_loc_next)
31618 {
31619 if (AT_class (a) != dw_val_class_loc)
31620 continue;
31621 switch (a->dw_attr)
31622 {
31623 /* The following attributes allow both exprloc and loclist
31624 classes, so we can change them into a loclist. */
31625 case DW_AT_location:
31626 case DW_AT_string_length:
31627 case DW_AT_return_addr:
31628 case DW_AT_data_member_location:
31629 case DW_AT_frame_base:
31630 case DW_AT_segment:
31631 case DW_AT_static_link:
31632 case DW_AT_use_location:
31633 case DW_AT_vtable_elem_location:
31634 if (prev)
31635 {
31636 prev->dw_loc_next = NULL;
31637 prepend_loc_descr_to_each (l, AT_loc (a));
31638 }
31639 if (next)
31640 add_loc_descr_to_each (l, next);
31641 a->dw_attr_val.val_class = dw_val_class_loc_list;
31642 a->dw_attr_val.val_entry = NULL;
31643 a->dw_attr_val.v.val_loc_list = l;
31644 have_location_lists = true;
31645 return true;
31646 /* The following attributes allow both exprloc and reference,
31647 so if the whole expression is DW_OP_GNU_variable_value alone
31648 we could transform it into a reference. */
31649 case DW_AT_byte_size:
31650 case DW_AT_bit_size:
31651 case DW_AT_lower_bound:
31652 case DW_AT_upper_bound:
31653 case DW_AT_bit_stride:
31654 case DW_AT_count:
31655 case DW_AT_allocated:
31656 case DW_AT_associated:
31657 case DW_AT_byte_stride:
31658 if (prev == NULL && next == NULL)
31659 break;
31660 /* FALLTHRU */
31661 default:
31662 if (dwarf_strict)
31663 continue;
31664 break;
31665 }
31666 /* Create DW_TAG_variable that we can refer to. */
31667 gen_decl_die (decl, NULL_TREE, NULL,
31668 lookup_decl_die (current_function_decl));
31669 ref = lookup_decl_die (decl);
31670 if (ref)
31671 {
31672 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31673 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31674 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31675 }
31676 continue;
31677 }
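/* A single location expression: splice it in place of the
DW_OP_GNU_variable_value operation and rescan from the spliced-in
expression, which may itself contain further such operations. */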
31678 if (prev)
31679 {
31680 prev->dw_loc_next = l->expr;
31681 add_loc_descr (&prev->dw_loc_next, next);
31682 free_loc_descr (loc, NULL);
31683 next = prev->dw_loc_next;
31684 }
31685 else
31686 {
31687 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31688 add_loc_descr (&loc, next);
31689 next = loc;
31690 }
31691 loc = prev;
31692 }
31693 return false;
31694 }
31695
31696 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31697
31698 static void
31699 resolve_variable_value (dw_die_ref die)
31700 {
31701 dw_attr_node *a;
31702 dw_loc_list_ref loc;
31703 unsigned ix;
31704
31705 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31706 switch (AT_class (a))
31707 {
31708 case dw_val_class_loc:
31709 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31710 break;
31711 /* FALLTHRU */
31712 case dw_val_class_loc_list:
31713 loc = AT_loc_list (a);
31714 gcc_assert (loc);
31715 for (; loc; loc = loc->dw_loc_next)
31716 resolve_variable_value_in_expr (a, loc->expr);
31717 break;
31718 default:
31719 break;
31720 }
31721 }
31722
31723 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31724 temporaries in the current function. */
31725
31726 static void
31727 resolve_variable_values (void)
31728 {
31729 if (!variable_value_hash || !current_function_decl)
31730 return;
31731
31732 struct variable_value_struct *node
31733 = variable_value_hash->find_with_hash (current_function_decl,
31734 DECL_UID (current_function_decl));
31735
31736 if (node == NULL)
31737 return;
31738
31739 unsigned int i;
31740 dw_die_ref die;
31741 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31742 resolve_variable_value (die);
31743 }
31744
31745 /* Helper function for note_variable_value, handle one location
31746 expression. */
31747
31748 static void
31749 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31750 {
31751 for (; loc; loc = loc->dw_loc_next)
31752 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31753 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31754 {
31755 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31756 dw_die_ref ref = lookup_decl_die (decl);
31757 if (! ref && (flag_generate_lto || flag_generate_offload))
31758 {
31759 /* ??? This is somewhat a hack because we do not create DIEs
31760 for variables not in BLOCK trees early, but when generating
31761 early LTO output we need the dw_val_class_decl_ref to be
31762 fully resolved. For fat LTO objects we'd also like to
31763 undo this after LTO dwarf output. */
31764 gcc_assert (DECL_CONTEXT (decl));
31765 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31766 gcc_assert (ctx != NULL);
31767 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31768 ref = lookup_decl_die (decl);
31769 gcc_assert (ref != NULL);
31770 }
31771 if (ref)
31772 {
31773 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31774 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31775 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31776 continue;
31777 }
31778 if (VAR_P (decl)
31779 && DECL_CONTEXT (decl)
31780 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31781 && lookup_decl_die (DECL_CONTEXT (decl)))
31782 {
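/* No DIE for the variable yet, but its containing function has one:
remember this DIE keyed by that function so resolve_variable_values
can retry the lookup in the function's context. */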
31783 if (!variable_value_hash)
31784 variable_value_hash
31785 = hash_table<variable_value_hasher>::create_ggc (10);
31786
31787 tree fndecl = DECL_CONTEXT (decl);
31788 struct variable_value_struct *node;
31789 struct variable_value_struct **slot
31790 = variable_value_hash->find_slot_with_hash (fndecl,
31791 DECL_UID (fndecl),
31792 INSERT);
31793 if (*slot == NULL)
31794 {
31795 node = ggc_cleared_alloc<variable_value_struct> ();
31796 node->decl_id = DECL_UID (fndecl);
31797 *slot = node;
31798 }
31799 else
31800 node = *slot;
31801
31802 vec_safe_push (node->dies, die);
31803 }
31804 }
31805 }
31806
31807 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31808 with dw_val_class_decl_ref operand. */
31809
31810 static void
31811 note_variable_value (dw_die_ref die)
31812 {
31813 dw_die_ref c;
31814 dw_attr_node *a;
31815 dw_loc_list_ref loc;
31816 unsigned ix;
31817
31818 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31819 switch (AT_class (a))
31820 {
31821 case dw_val_class_loc_list:
31822 loc = AT_loc_list (a);
31823 gcc_assert (loc);
31824 if (!loc->noted_variable_value)
31825 {
31826 loc->noted_variable_value = 1;
31827 for (; loc; loc = loc->dw_loc_next)
31828 note_variable_value_in_expr (die, loc->expr);
31829 }
31830 break;
31831 case dw_val_class_loc:
31832 note_variable_value_in_expr (die, AT_loc (a));
31833 break;
31834 default:
31835 break;
31836 }
31837
31838 /* Mark children. */
31839 FOR_EACH_CHILD (die, c, note_variable_value (c));
31840 }
31841
31842 /* Perform any cleanups needed after the early debug generation pass
31843 has run. */
31844
31845 static void
31846 dwarf2out_early_finish (const char *filename)
31847 {
31848 set_early_dwarf s;
31849 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31850
31851 /* PCH might result in the DW_AT_producer string being restored from the
31852 header compilation, so always fill it with an empty string initially
31853 and overwrite only here. */
31854 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31855 producer_string = gen_producer_string ();
31856 producer->dw_attr_val.v.val_str->refcount--;
31857 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31858
31859 /* Add the name for the main input file now. We delayed this from
31860 dwarf2out_init to avoid complications with PCH. */
31861 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31862 add_comp_dir_attribute (comp_unit_die ());
31863
31864 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31865 DW_AT_comp_dir into .debug_line_str section. */
31866 if (!output_asm_line_debug_info ()
31867 && dwarf_version >= 5
31868 && DWARF5_USE_DEBUG_LINE_STR)
31869 {
31870 for (int i = 0; i < 2; i++)
31871 {
31872 dw_attr_node *a = get_AT (comp_unit_die (),
31873 i ? DW_AT_comp_dir : DW_AT_name);
31874 if (a == NULL
31875 || AT_class (a) != dw_val_class_str
31876 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31877 continue;
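/* Very short strings (those that fit within the offset size,
including the terminating NUL) gain nothing from being moved to
.debug_line_str, so they are skipped above. */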
31878
31879 if (! debug_line_str_hash)
31880 debug_line_str_hash
31881 = hash_table<indirect_string_hasher>::create_ggc (10);
31882
31883 struct indirect_string_node *node
31884 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31885 set_indirect_string (node);
31886 node->form = DW_FORM_line_strp;
31887 a->dw_attr_val.v.val_str->refcount--;
31888 a->dw_attr_val.v.val_str = node;
31889 }
31890 }
31891
31892 /* With LTO early dwarf was really finished at compile-time, so make
31893 sure to adjust the phase after annotating the LTRANS CU DIE. */
31894 if (in_lto_p)
31895 {
31896 early_dwarf_finished = true;
31897 if (dump_file)
31898 {
31899 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
31900 print_die (comp_unit_die (), dump_file);
31901 }
31902 return;
31903 }
31904
31905 /* Walk through the list of incomplete types again, trying once more to
31906 emit full debugging info for them. */
31907 retry_incomplete_types ();
31908
31909 /* The point here is to flush out the limbo list so that it is empty
31910 and we don't need to stream it for LTO. */
31911 flush_limbo_die_list ();
31912
31913 gen_scheduled_generic_parms_dies ();
31914 gen_remaining_tmpl_value_param_die_attribute ();
31915
31916 /* Add DW_AT_linkage_name for all deferred DIEs. */
31917 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31918 {
31919 tree decl = node->created_for;
31920 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31921 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31922 ended up in deferred_asm_name before we knew it was
31923 constant and never written to disk. */
31924 && DECL_ASSEMBLER_NAME (decl))
31925 {
31926 add_linkage_attr (node->die, decl);
31927 move_linkage_attr (node->die);
31928 }
31929 }
31930 deferred_asm_name = NULL;
31931
31932 if (flag_eliminate_unused_debug_types)
31933 prune_unused_types ();
31934
31935 /* Generate separate COMDAT sections for type DIEs. */
31936 if (use_debug_types)
31937 {
31938 break_out_comdat_types (comp_unit_die ());
31939
31940 /* Each new type_unit DIE was added to the limbo die list when created.
31941 Since these have all been added to comdat_type_list, clear the
31942 limbo die list. */
31943 limbo_die_list = NULL;
31944
31945 /* For each new comdat type unit, copy declarations for incomplete
31946 types to make the new unit self-contained (i.e., no direct
31947 references to the main compile unit). */
31948 for (comdat_type_node *ctnode = comdat_type_list;
31949 ctnode != NULL; ctnode = ctnode->next)
31950 copy_decls_for_unworthy_types (ctnode->root_die);
31951 copy_decls_for_unworthy_types (comp_unit_die ());
31952
31953 /* In the process of copying declarations from one unit to another,
31954 we may have left some declarations behind that are no longer
31955 referenced. Prune them. */
31956 prune_unused_types ();
31957 }
31958
31959 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
31960 with dw_val_class_decl_ref operand. */
31961 note_variable_value (comp_unit_die ());
31962 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31963 note_variable_value (node->die);
31964 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31965 ctnode = ctnode->next)
31966 note_variable_value (ctnode->root_die);
31967 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31968 note_variable_value (node->die);
31969
31970 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31971 both the main_cu and all skeleton TUs. Making this call unconditional
31972 would end up either adding a second copy of the AT_pubnames attribute, or
31973 requiring a special case in add_top_level_skeleton_die_attrs. */
31974 if (!dwarf_split_debug_info)
31975 add_AT_pubnames (comp_unit_die ());
31976
31977 /* The early debug phase is now finished. */
31978 early_dwarf_finished = true;
31979 if (dump_file)
31980 {
31981 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
31982 print_die (comp_unit_die (), dump_file);
31983 }
31984
31985 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
31986 if ((!flag_generate_lto && !flag_generate_offload)
31987 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
31988 copy_lto_debug_sections operation of the simple object support in
31989 libiberty is not implemented for them yet. */
31990 || TARGET_PECOFF || TARGET_COFF)
31991 return;
31992
31993 /* Now that we are going to output for LTO, initialize sections and
31994 labels to the LTO variants. We don't need a random-seed postfix as
31995 other LTO sections do, since linking the LTO debug sections into one
31996 in a partial link is fine. */
31997 init_sections_and_labels (true);
31998
31999 /* The output below is modeled after dwarf2out_finish with all
32000 location related output removed and some LTO specific changes.
32001 Some refactoring might make both smaller and easier to match up. */
32002
32003 /* Traverse the DIE's and add sibling attributes to those DIE's
32004 that have children. */
32005 add_sibling_attributes (comp_unit_die ());
32006 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32007 add_sibling_attributes (node->die);
32008 for (comdat_type_node *ctnode = comdat_type_list;
32009 ctnode != NULL; ctnode = ctnode->next)
32010 add_sibling_attributes (ctnode->root_die);
32011
32012 /* AIX Assembler inserts the length, so adjust the reference to match the
32013 offset expected by debuggers. */
32014 strcpy (dl_section_ref, debug_line_section_label);
32015 if (XCOFF_DEBUGGING_INFO)
32016 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32017
32018 if (debug_info_level >= DINFO_LEVEL_TERSE)
32019 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32020
32021 if (have_macinfo)
32022 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32023 macinfo_section_label);
32024
32025 save_macinfo_strings ();
32026
32027 if (dwarf_split_debug_info)
32028 {
32029 unsigned int index = 0;
32030 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32031 }
32032
32033 /* Output all of the compilation units. We put the main one last so that
32034 the offsets are available to output_pubnames. */
32035 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32036 output_comp_unit (node->die, 0, NULL);
32037
32038 hash_table<comdat_type_hasher> comdat_type_table (100);
32039 for (comdat_type_node *ctnode = comdat_type_list;
32040 ctnode != NULL; ctnode = ctnode->next)
32041 {
32042 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32043
32044 /* Don't output duplicate types. */
32045 if (*slot != HTAB_EMPTY_ENTRY)
32046 continue;
32047
32048 /* Add a pointer to the line table for the main compilation unit
32049 so that the debugger can make sense of DW_AT_decl_file
32050 attributes. */
32051 if (debug_info_level >= DINFO_LEVEL_TERSE)
32052 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32053 (!dwarf_split_debug_info
32054 ? debug_line_section_label
32055 : debug_skeleton_line_section_label));
32056
32057 output_comdat_type_unit (ctnode);
32058 *slot = ctnode;
32059 }
32060
32061 /* Stick a unique symbol to the main debuginfo section. */
32062 compute_comp_unit_symbol (comp_unit_die ());
32063
32064 /* Output the main compilation unit. We always need it, if only for
32065 the CU symbol. */
32066 output_comp_unit (comp_unit_die (), true, NULL);
32067
32068 /* Output the abbreviation table. */
32069 if (vec_safe_length (abbrev_die_table) != 1)
32070 {
32071 switch_to_section (debug_abbrev_section);
32072 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32073 output_abbrev_section ();
32074 }
32075
32076 /* Have to end the macro section. */
32077 if (have_macinfo)
32078 {
32079 /* We have to save macinfo state if we need to output it again
32080 for the FAT part of the object. */
32081 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32082 if (flag_fat_lto_objects)
32083 macinfo_table = macinfo_table->copy ();
32084
32085 switch_to_section (debug_macinfo_section);
32086 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32087 output_macinfo (debug_line_section_label, true);
32088 dw2_asm_output_data (1, 0, "End compilation unit");
32089
32090 if (flag_fat_lto_objects)
32091 {
32092 vec_free (macinfo_table);
32093 macinfo_table = saved_macinfo_table;
32094 }
32095 }
32096
32097 /* Emit a skeleton debug_line section. */
32098 switch_to_section (debug_line_section);
32099 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32100 output_line_info (true);
32101
32102 /* If we emitted any indirect strings, output the string table too. */
32103 if (debug_str_hash || skeleton_debug_str_hash)
32104 output_indirect_strings ();
32105 if (debug_line_str_hash)
32106 {
32107 switch_to_section (debug_line_str_section);
32108 const enum dwarf_form form = DW_FORM_line_strp;
32109 debug_line_str_hash->traverse<enum dwarf_form,
32110 output_indirect_string> (form);
32111 }
32112
32113 /* Switch back to the text section. */
32114 switch_to_section (text_section);
32115 }
32116
32117 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32118 within the same process. For use by toplev::finalize. */
32119
32120 void
32121 dwarf2out_c_finalize (void)
32122 {
32123 last_var_location_insn = NULL;
32124 cached_next_real_insn = NULL;
32125 used_rtx_array = NULL;
32126 incomplete_types = NULL;
32127 debug_info_section = NULL;
32128 debug_skeleton_info_section = NULL;
32129 debug_abbrev_section = NULL;
32130 debug_skeleton_abbrev_section = NULL;
32131 debug_aranges_section = NULL;
32132 debug_addr_section = NULL;
32133 debug_macinfo_section = NULL;
32134 debug_line_section = NULL;
32135 debug_skeleton_line_section = NULL;
32136 debug_loc_section = NULL;
32137 debug_pubnames_section = NULL;
32138 debug_pubtypes_section = NULL;
32139 debug_str_section = NULL;
32140 debug_line_str_section = NULL;
32141 debug_str_dwo_section = NULL;
32142 debug_str_offsets_section = NULL;
32143 debug_ranges_section = NULL;
32144 debug_frame_section = NULL;
32145 fde_vec = NULL;
32146 debug_str_hash = NULL;
32147 debug_line_str_hash = NULL;
32148 skeleton_debug_str_hash = NULL;
32149 dw2_string_counter = 0;
32150 have_multiple_function_sections = false;
32151 text_section_used = false;
32152 cold_text_section_used = false;
32153 cold_text_section = NULL;
32154 current_unit_personality = NULL;
32155
32156 early_dwarf = false;
32157 early_dwarf_finished = false;
32158
32159 next_die_offset = 0;
32160 single_comp_unit_die = NULL;
32161 comdat_type_list = NULL;
32162 limbo_die_list = NULL;
32163 file_table = NULL;
32164 decl_die_table = NULL;
32165 common_block_die_table = NULL;
32166 decl_loc_table = NULL;
32167 call_arg_locations = NULL;
32168 call_arg_loc_last = NULL;
32169 call_site_count = -1;
32170 tail_call_site_count = -1;
32171 cached_dw_loc_list_table = NULL;
32172 abbrev_die_table = NULL;
32173 delete dwarf_proc_stack_usage_map;
32174 dwarf_proc_stack_usage_map = NULL;
32175 line_info_label_num = 0;
32176 cur_line_info_table = NULL;
32177 text_section_line_info = NULL;
32178 cold_text_section_line_info = NULL;
32179 separate_line_info = NULL;
32180 info_section_emitted = false;
32181 pubname_table = NULL;
32182 pubtype_table = NULL;
32183 macinfo_table = NULL;
32184 ranges_table = NULL;
32185 ranges_by_label = NULL;
32186 rnglist_idx = 0;
32187 have_location_lists = false;
32188 loclabel_num = 0;
32189 poc_label_num = 0;
32190 last_emitted_file = NULL;
32191 label_num = 0;
32192 tmpl_value_parm_die_table = NULL;
32193 generic_type_instances = NULL;
32194 frame_pointer_fb_offset = 0;
32195 frame_pointer_fb_offset_valid = false;
32196 base_types.release ();
32197 XDELETEVEC (producer_string);
32198 producer_string = NULL;
32199 }
32200
32201 #include "gt-dwarf2out.h"